
Commit 02a3ca4

Merge branch 'main' into add-custom-fetch-mcp-streamable-http
2 parents 7710742 + a0b1f3b commit 02a3ca4

File tree: 14 files changed (+154, -34 lines)


.changeset/angry-cooks-wait.md

Lines changed: 7 additions & 0 deletions

@@ -0,0 +1,7 @@
+---
+"@openai/agents-core": patch
+"@openai/agents-extensions": patch
+"@openai/agents-openai": patch
+---
+
+Fix #233 - eliminate confusion with "input_text" type items with role: "assistant"

.changeset/brave-swans-smile.md

Lines changed: 5 additions & 0 deletions

@@ -0,0 +1,5 @@
+---
+'@openai/agents-openai': patch
+---
+
+Pass through strict flag for function tools when using completion

.changeset/cool-cloths-fry.md

Lines changed: 6 additions & 0 deletions

@@ -0,0 +1,6 @@
+---
+'@openai/agents-openai': patch
+'@openai/agents-core': patch
+---
+
+fix: support snake_case usage fields from OpenAI responses
.changeset/ (file name not captured in this view)

Lines changed: 5 additions & 0 deletions

@@ -0,0 +1,5 @@
+---
+'@openai/agents-realtime': patch
+---
+
+fix(realtime-session): preserve audio format & other session config fields on agent update

docs/src/content/docs/extensions/ai-sdk.mdx

Lines changed: 6 additions & 1 deletion

@@ -30,7 +30,7 @@ of supported models that can be brought into the Agents SDK through this adapter
 2. Choose your desired model package from the [Vercel's AI SDK](https://sdk.vercel.ai/docs/models/overview) and install it:
 
    ```bash
-   npm install @ai-sdk/openai
+   npm install @ai-sdk/openai@"^1.0.0"
    ```
 
 3. Import the adapter and model to connect to your agent:
@@ -48,6 +48,11 @@ of supported models that can be brought into the Agents SDK through this adapter
 
 </Steps>
 
+<Aside type="caution">
+  Vercel's AI SDK has recently migrated to v2, however openai agent extensions is not yet compatible with v2.
+  Therefore v1 versions of Vercel's AI SDK has to be installed.
+</Aside>
+
 ## Example
 
 <Code lang="typescript" code={aiSdkSetupExample} title="AI SDK Setup" />
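For orientation, here is a minimal sketch of the setup those docs steps describe, assuming the `aisdk` adapter exported by `@openai/agents-extensions` and a v1 release of `@ai-sdk/openai`; the model name and agent wiring are illustrative, not taken from this commit:

```typescript
// Sketch only: wrap a v1 AI SDK model so the Agents SDK can use it.
// Assumes `npm install @ai-sdk/openai@"^1.0.0"` as pinned in the diff above.
import { Agent, run } from '@openai/agents';
import { aisdk } from '@openai/agents-extensions';
import { openai } from '@ai-sdk/openai';

// The adapter converts the AI SDK model into a model object the Agents SDK accepts.
const model = aisdk(openai('gpt-4o'));

const agent = new Agent({
  name: 'Assistant',
  instructions: 'You are a helpful assistant.',
  model,
});

async function main() {
  const result = await run(agent, 'Say hello.');
  console.log(result.finalOutput);
}

main().catch(console.error);
```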

packages/agents-core/src/types/protocol.ts

Lines changed: 0 additions & 1 deletion

@@ -241,7 +241,6 @@ export type ComputerAction = z.infer<typeof computerActions>;
 export const AssistantContent = z.discriminatedUnion('type', [
   OutputText,
   Refusal,
-  InputText,
   AudioContent,
   ImageContent,
 ]);
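The practical rule after this change: `input_text` content belongs to user messages, while assistant messages carry `output_text`. A rough sketch of the two item shapes, abbreviated and illustrative rather than copied from the protocol schemas:

```typescript
// Sketch only: the content type each role is expected to use.
// `input_text` is user-authored input; `output_text` is model output.
const userMessage = {
  type: 'message',
  role: 'user',
  content: [{ type: 'input_text', text: 'What is the capital of France?' }],
};

const assistantMessage = {
  type: 'message',
  role: 'assistant',
  status: 'completed',
  content: [{ type: 'output_text', text: 'The capital of France is Paris.' }],
};
```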

packages/agents-core/src/usage.ts

Lines changed: 22 additions & 8 deletions

@@ -1,5 +1,15 @@
 import { UsageData } from './types/protocol';
 
+type UsageInput = Partial<
+  UsageData & {
+    input_tokens: number;
+    output_tokens: number;
+    total_tokens: number;
+    input_tokens_details: object;
+    output_tokens_details: object;
+  }
+> & { requests?: number };
+
 /**
  * Tracks token usage and request counts for an agent run.
  */
@@ -34,7 +44,7 @@ export class Usage {
    */
   public outputTokensDetails: Array<Record<string, number>> = [];
 
-  constructor(input?: Partial<UsageData> & { requests?: number }) {
+  constructor(input?: UsageInput) {
     if (typeof input === 'undefined') {
       this.requests = 0;
       this.inputTokens = 0;
@@ -44,14 +54,18 @@ export class Usage {
       this.outputTokensDetails = [];
     } else {
       this.requests = input?.requests ?? 1;
-      this.inputTokens = input?.inputTokens ?? 0;
-      this.outputTokens = input?.outputTokens ?? 0;
-      this.totalTokens = input?.totalTokens ?? 0;
-      this.inputTokensDetails = input?.inputTokensDetails
-        ? [input.inputTokensDetails]
+      this.inputTokens = input?.inputTokens ?? input?.input_tokens ?? 0;
+      this.outputTokens = input?.outputTokens ?? input?.output_tokens ?? 0;
+      this.totalTokens = input?.totalTokens ?? input?.total_tokens ?? 0;
+      const inputTokensDetails =
+        input?.inputTokensDetails ?? input?.input_tokens_details;
+      this.inputTokensDetails = inputTokensDetails
+        ? [inputTokensDetails as Record<string, number>]
         : [];
-      this.outputTokensDetails = input?.outputTokensDetails
-        ? [input.outputTokensDetails]
+      const outputTokensDetails =
+        input?.outputTokensDetails ?? input?.output_tokens_details;
+      this.outputTokensDetails = outputTokensDetails
+        ? [outputTokensDetails as Record<string, number>]
         : [];
     }
   }
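The consumer-facing effect is that a raw `usage` payload from an OpenAI response can now be handed to the constructor without renaming its fields. A small sketch, assuming `Usage` is importable from `@openai/agents-core` and with made-up token values; the test below exercises the same path:

```typescript
import { Usage } from '@openai/agents-core';

// camelCase input keeps working as before.
const camel = new Usage({ inputTokens: 10, outputTokens: 5, totalTokens: 15 });

// snake_case input, as returned by the OpenAI API, now maps to the same fields.
const snake = new Usage({
  input_tokens: 10,
  output_tokens: 5,
  total_tokens: 15,
  input_tokens_details: { cached_tokens: 0 },
  output_tokens_details: { reasoning_tokens: 2 },
});

console.log(snake.inputTokens, snake.outputTokens, snake.totalTokens); // 10 5 15
```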

packages/agents-core/test/usage.test.ts

Lines changed: 18 additions & 0 deletions

@@ -26,6 +26,24 @@ describe('Usage', () => {
     expect(usage.totalTokens).toBe(15);
   });
 
+  it('falls back to snake_case fields', () => {
+    const usage = new Usage({
+      requests: 2,
+      input_tokens: 7,
+      output_tokens: 3,
+      total_tokens: 10,
+      input_tokens_details: { foo: 1 },
+      output_tokens_details: { bar: 2 },
+    });
+
+    expect(usage.requests).toBe(2);
+    expect(usage.inputTokens).toBe(7);
+    expect(usage.outputTokens).toBe(3);
+    expect(usage.totalTokens).toBe(10);
+    expect(usage.inputTokensDetails).toEqual([{ foo: 1 }]);
+    expect(usage.outputTokensDetails).toEqual([{ bar: 2 }]);
+  });
+
   it('adds other Usage instances correctly', () => {
     const usageA = new Usage({
       inputTokens: 1,

packages/agents-extensions/src/aiSdk.ts

Lines changed: 8 additions & 21 deletions

@@ -116,29 +116,16 @@ export function itemsToLanguageV1Messages(
       messages.push({
         role,
         content: content
-          .filter((c) => c.type === 'input_text' || c.type === 'output_text')
+          .filter((c) => c.type === 'output_text')
           .map((c) => {
             const { providerData: contentProviderData } = c;
-            if (c.type === 'output_text') {
-              return {
-                type: 'text',
-                text: c.text,
-                providerMetadata: {
-                  ...(contentProviderData ?? {}),
-                },
-              };
-            }
-            if (c.type === 'input_text') {
-              return {
-                type: 'text',
-                text: c.text,
-                providerMetadata: {
-                  ...(contentProviderData ?? {}),
-                },
-              };
-            }
-            const exhaustiveCheck = c satisfies never;
-            throw new UserError(`Unknown content type: ${exhaustiveCheck}`);
+            return {
+              type: 'text',
+              text: c.text,
+              providerMetadata: {
+                ...(contentProviderData ?? {}),
+              },
+            };
           }),
         providerMetadata: {
           ...(providerData ?? {}),

packages/agents-openai/src/openaiChatCompletionsConverter.ts

Lines changed: 2 additions & 1 deletion

@@ -37,7 +37,7 @@ export function extractAllAssistantContent(
   }
   const out: ChatCompletionAssistantMessageParam['content'] = [];
   for (const c of content) {
-    if (c.type === 'output_text' || c.type === 'input_text') {
+    if (c.type === 'output_text') {
       out.push({
         type: 'text',
         text: c.text,
@@ -277,6 +277,7 @@ export function toolToOpenAI(tool: SerializedTool): ChatCompletionTool {
       name: tool.name,
       description: tool.description || '',
       parameters: tool.parameters,
+      strict: tool.strict,
     },
   };
 }
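To illustrate the `strict` pass-through, a hedged sketch of a function tool defined with the `tool()` helper and the Chat Completions payload it would now serialize to; the tool name and schema are invented for this example:

```typescript
import { z } from 'zod';
import { tool } from '@openai/agents';

// A zod-backed function tool; the SDK serializes it with strict JSON schema mode enabled.
const getWeather = tool({
  name: 'get_weather',
  description: 'Return the weather for a city',
  parameters: z.object({ city: z.string() }),
  execute: async ({ city }) => `Sunny in ${city}`,
});

// With this commit, the Chat Completions tool payload keeps the strict flag
// instead of dropping it, roughly:
// {
//   type: 'function',
//   function: {
//     name: 'get_weather',
//     description: 'Return the weather for a city',
//     parameters: { /* JSON schema for { city: string } */ },
//     strict: true,
//   },
// }
```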
