Skip to content

Commit fc99390

Browse files
Fix Azure streaming annotation handling (#93)
* fix(openai): handle empty stream choices
* Create twelve-emus-cheat.md
1 parent 574b7af commit fc99390

File tree

3 files changed

+89
-14
lines changed

3 files changed

+89
-14
lines changed

.changeset/twelve-emus-cheat.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
"@openai/agents-openai": patch
3+
---
4+
5+
Fix Azure streaming annotation handling

packages/agents-openai/src/openaiChatCompletionsStreaming.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -43,7 +43,7 @@ export async function* convertChatCompletionsStreamToResponses(
4343
// This is always set by the OpenAI API, but not by others e.g. LiteLLM
4444
usage = (chunk as any).usage || undefined;
4545

46-
if (!chunk.choices || !chunk.choices[0].delta) continue;
46+
if (!chunk.choices?.[0]?.delta) continue;
4747
const delta = chunk.choices[0].delta;
4848

4949
// Handle text

packages/agents-openai/test/openaiChatCompletionsStreaming.test.ts

Lines changed: 83 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,10 @@
11
import { describe, it, expect } from 'vitest';
22
import { convertChatCompletionsStreamToResponses } from '../src/openaiChatCompletionsStreaming';
33
import { FAKE_ID } from '../src/openaiChatCompletionsModel';
4-
import type { ChatCompletion, ChatCompletionChunk } from 'openai/resources/chat';
4+
import type {
5+
ChatCompletion,
6+
ChatCompletionChunk,
7+
} from 'openai/resources/chat';
58

69
function makeChunk(delta: any, usage?: any) {
710
return {
@@ -14,7 +17,6 @@ function makeChunk(delta: any, usage?: any) {
1417
} as any;
1518
}
1619

17-
1820
describe('convertChatCompletionsStreamToResponses', () => {
1921
it('emits protocol events for streamed chat completions', async () => {
2022
const response: ChatCompletion = {
@@ -81,13 +83,23 @@ describe('convertChatCompletionsStreamToResponses', () => {
8183
}
8284

8385
const events = [] as any[];
84-
for await (const ev of convertChatCompletionsStreamToResponses(response, fakeStream() as any)) {
86+
for await (const ev of convertChatCompletionsStreamToResponses(
87+
response,
88+
fakeStream() as any,
89+
)) {
8590
events.push(ev);
8691
}
8792

88-
expect(events[0]).toEqual({ type: 'response_started', providerData: { ...chunk1 } });
93+
expect(events[0]).toEqual({
94+
type: 'response_started',
95+
providerData: { ...chunk1 },
96+
});
8997
expect(events[1]).toEqual({ type: 'model', event: chunk1 });
90-
expect(events[2]).toEqual({ type: 'output_text_delta', delta: 'hello', providerData: { ...chunk1 } });
98+
expect(events[2]).toEqual({
99+
type: 'output_text_delta',
100+
delta: 'hello',
101+
providerData: { ...chunk1 },
102+
});
91103
expect(events[3]).toEqual({ type: 'model', event: chunk2 });
92104
expect(events[4]).toEqual({ type: 'model', event: chunk3 });
93105

@@ -109,7 +121,11 @@ describe('convertChatCompletionsStreamToResponses', () => {
109121
type: 'message',
110122
status: 'completed',
111123
content: [
112-
{ type: 'output_text', text: 'hello', providerData: { annotations: [] } },
124+
{
125+
type: 'output_text',
126+
text: 'hello',
127+
providerData: { annotations: [] },
128+
},
113129
{ type: 'refusal', refusal: 'nope' },
114130
],
115131
},
@@ -126,29 +142,51 @@ describe('convertChatCompletionsStreamToResponses', () => {
126142
});
127143
});
128144

129-
130145
describe('convertChatCompletionsStreamToResponses', () => {
131146
it('converts chunks to protocol events', async () => {
132-
async function* stream(): AsyncGenerator<ChatCompletionChunk, void, unknown> {
147+
async function* stream(): AsyncGenerator<
148+
ChatCompletionChunk,
149+
void,
150+
unknown
151+
> {
133152
yield makeChunk({ content: 'he' });
134-
yield makeChunk({ content: 'llo' }, { prompt_tokens: 1, completion_tokens: 2, total_tokens: 3 });
135-
yield makeChunk({ tool_calls: [{ index: 0, id: 'call', function: { name: 'fn', arguments: 'a' } }] });
153+
yield makeChunk(
154+
{ content: 'llo' },
155+
{ prompt_tokens: 1, completion_tokens: 2, total_tokens: 3 },
156+
);
157+
yield makeChunk({
158+
tool_calls: [
159+
{ index: 0, id: 'call', function: { name: 'fn', arguments: 'a' } },
160+
],
161+
});
136162
}
137163

138164
const resp = { id: 'r' } as any;
139165
const events: any[] = [];
140-
for await (const e of convertChatCompletionsStreamToResponses(resp, stream() as any)) {
166+
for await (const e of convertChatCompletionsStreamToResponses(
167+
resp,
168+
stream() as any,
169+
)) {
141170
events.push(e);
142171
}
143172

144-
expect(events[0]).toEqual({ type: 'response_started', providerData: makeChunk({ content: 'he' }) });
173+
expect(events[0]).toEqual({
174+
type: 'response_started',
175+
providerData: makeChunk({ content: 'he' }),
176+
});
145177
// last event should be final response
146178
const final = events[events.length - 1];
147179
expect(final.type).toBe('response_done');
148180
expect(final.response.output).toEqual([
149181
{
150182
id: FAKE_ID,
151-
content: [{ text: 'hello', type: 'output_text', providerData: { annotations: [] } }],
183+
content: [
184+
{
185+
text: 'hello',
186+
type: 'output_text',
187+
providerData: { annotations: [] },
188+
},
189+
],
152190
role: 'assistant',
153191
type: 'message',
154192
status: 'completed',
@@ -163,4 +201,36 @@ describe('convertChatCompletionsStreamToResponses', () => {
163201
]);
164202
expect(final.response.usage.totalTokens).toBe(0);
165203
});
204+
205+
it('ignores chunks with empty choices', async () => {
206+
const emptyChunk: ChatCompletionChunk = {
207+
id: 'e',
208+
created: 0,
209+
model: 'm',
210+
object: 'chat.completion.chunk',
211+
choices: [],
212+
} as any;
213+
214+
async function* stream(): AsyncGenerator<
215+
ChatCompletionChunk,
216+
void,
217+
unknown
218+
> {
219+
yield emptyChunk;
220+
yield makeChunk({ content: 'hi' });
221+
}
222+
223+
const resp = { id: 'r' } as any;
224+
const events: any[] = [];
225+
for await (const e of convertChatCompletionsStreamToResponses(
226+
resp,
227+
stream() as any,
228+
)) {
229+
events.push(e);
230+
}
231+
232+
const deltas = events.filter((ev) => ev.type === 'output_text_delta');
233+
expect(deltas).toHaveLength(1);
234+
expect(deltas[0].delta).toBe('hi');
235+
});
166236
});

0 commit comments

Comments (0)