Commit 4818d5e

fix(usage): handle OpenAI snake_case usage fields (#298) (#303)
1 parent 8f12b76 commit 4818d5e

4 files changed: +72 −8 lines changed


.changeset/cool-cloths-fry.md

Lines changed: 6 additions & 0 deletions

@@ -0,0 +1,6 @@
+---
+'@openai/agents-openai': patch
+'@openai/agents-core': patch
+---
+
+fix: support snake_case usage fields from OpenAI responses
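For context, OpenAI responses report token usage with snake_case keys, while the Usage constructor previously read only the camelCase UsageData fields, so such payloads ended up with zeroed counts. A minimal TypeScript sketch of the mismatch this patch closes (payload abbreviated; values are illustrative):

// Usage payload as returned by the OpenAI Responses API (abbreviated).
const apiUsage = {
  input_tokens: 7,
  output_tokens: 3,
  total_tokens: 10,
};

// Before this change, the snake_case keys were ignored:
//   new Usage(apiUsage).inputTokens  // 0
// After this change, they are picked up via the new fallbacks:
//   new Usage(apiUsage).inputTokens  // 7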

packages/agents-core/src/usage.ts

Lines changed: 22 additions & 8 deletions

@@ -1,5 +1,15 @@
 import { UsageData } from './types/protocol';

+type UsageInput = Partial<
+  UsageData & {
+    input_tokens: number;
+    output_tokens: number;
+    total_tokens: number;
+    input_tokens_details: object;
+    output_tokens_details: object;
+  }
+> & { requests?: number };
+
 /**
  * Tracks token usage and request counts for an agent run.
  */
@@ -34,7 +44,7 @@ export class Usage {
    */
   public outputTokensDetails: Array<Record<string, number>> = [];

-  constructor(input?: Partial<UsageData> & { requests?: number }) {
+  constructor(input?: UsageInput) {
     if (typeof input === 'undefined') {
       this.requests = 0;
       this.inputTokens = 0;
@@ -44,14 +54,18 @@ export class Usage {
       this.outputTokensDetails = [];
     } else {
       this.requests = input?.requests ?? 1;
-      this.inputTokens = input?.inputTokens ?? 0;
-      this.outputTokens = input?.outputTokens ?? 0;
-      this.totalTokens = input?.totalTokens ?? 0;
-      this.inputTokensDetails = input?.inputTokensDetails
-        ? [input.inputTokensDetails]
+      this.inputTokens = input?.inputTokens ?? input?.input_tokens ?? 0;
+      this.outputTokens = input?.outputTokens ?? input?.output_tokens ?? 0;
+      this.totalTokens = input?.totalTokens ?? input?.total_tokens ?? 0;
+      const inputTokensDetails =
+        input?.inputTokensDetails ?? input?.input_tokens_details;
+      this.inputTokensDetails = inputTokensDetails
+        ? [inputTokensDetails as Record<string, number>]
         : [];
-      this.outputTokensDetails = input?.outputTokensDetails
-        ? [input.outputTokensDetails]
+      const outputTokensDetails =
+        input?.outputTokensDetails ?? input?.output_tokens_details;
+      this.outputTokensDetails = outputTokensDetails
+        ? [outputTokensDetails as Record<string, number>]
         : [];
     }
   }
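As a quick illustration of the new constructor behavior (a sketch, not part of the commit, assuming Usage is re-exported from the package entry point): camelCase fields still take precedence, and the snake_case spellings are consulted only as a fallback.

import { Usage } from '@openai/agents-core';

// camelCase wins when both spellings are present; snake_case fills the gaps.
const usage = new Usage({
  inputTokens: 5,    // preferred camelCase spelling
  input_tokens: 99,  // ignored because inputTokens is set
  output_tokens: 3,  // used: no camelCase outputTokens provided
  total_tokens: 8,
});
// usage.inputTokens === 5, usage.outputTokens === 3, usage.totalTokens === 8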

packages/agents-core/test/usage.test.ts

Lines changed: 18 additions & 0 deletions

@@ -26,6 +26,24 @@ describe('Usage', () => {
     expect(usage.totalTokens).toBe(15);
   });

+  it('falls back to snake_case fields', () => {
+    const usage = new Usage({
+      requests: 2,
+      input_tokens: 7,
+      output_tokens: 3,
+      total_tokens: 10,
+      input_tokens_details: { foo: 1 },
+      output_tokens_details: { bar: 2 },
+    });
+
+    expect(usage.requests).toBe(2);
+    expect(usage.inputTokens).toBe(7);
+    expect(usage.outputTokens).toBe(3);
+    expect(usage.totalTokens).toBe(10);
+    expect(usage.inputTokensDetails).toEqual([{ foo: 1 }]);
+    expect(usage.outputTokensDetails).toEqual([{ bar: 2 }]);
+  });
+
   it('adds other Usage instances correctly', () => {
     const usageA = new Usage({
       inputTokens: 1,

packages/agents-openai/test/openaiChatCompletionsModel.test.ts

Lines changed: 26 additions & 0 deletions

@@ -79,6 +79,32 @@ describe('OpenAIChatCompletionsModel', () => {
     ]);
   });

+  it('parses usage tokens from snake_case fields', async () => {
+    const client = new FakeClient();
+    const response = {
+      id: 'r',
+      choices: [{ message: { content: 'hi' } }],
+      usage: { prompt_tokens: 11, completion_tokens: 7, total_tokens: 18 },
+    } as any;
+    client.chat.completions.create.mockResolvedValue(response);
+
+    const model = new OpenAIChatCompletionsModel(client as any, 'gpt');
+    const req: any = {
+      input: 'u',
+      modelSettings: {},
+      tools: [],
+      outputType: 'text',
+      handoffs: [],
+      tracing: false,
+    };
+
+    const result = await withTrace('t', () => model.getResponse(req));
+
+    expect(result.usage.inputTokens).toBe(11);
+    expect(result.usage.outputTokens).toBe(7);
+    expect(result.usage.totalTokens).toBe(18);
+  });
+
   it('outputs message when content is empty string', async () => {
     const client = new FakeClient();
     const response = {
