Skip to content

Commit 0d13415

Browse files
committed
fix(openai): fix tracing exception in the getResponse method when tracing is disabled
1 parent 9e718c0 commit 0d13415

File tree

3 files changed

+88
-19
lines changed

3 files changed

+88
-19
lines changed

.changeset/seven-kings-drop.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
'@openai/agents-openai': patch
3+
---
4+
5+
Fix tracing exception in the getResponse method when tracing is disabled

packages/agents-openai/src/openaiResponsesModel.ts

Lines changed: 42 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
import {
22
Model,
33
Usage,
4-
withResponseSpan,
54
createResponseSpan,
65
setCurrentSpan,
76
resetCurrentSpan,
@@ -876,32 +875,56 @@ export class OpenAIResponsesModel implements Model {
876875
* @returns A promise that resolves to the response from the model.
877876
*/
878877
async getResponse(request: ModelRequest): Promise<ModelResponse> {
879-
const response = await withResponseSpan(async (span) => {
878+
const span = request.tracing ? createResponseSpan() : undefined;
879+
880+
try {
881+
if (span) {
882+
span.start();
883+
setCurrentSpan(span);
884+
}
885+
880886
const response = await this.#fetchResponse(request, false);
881887

882-
if (request.tracing) {
888+
if (request.tracing && span) {
883889
span.spanData.response_id = response.id;
884890
span.spanData._input = request.input;
885891
span.spanData._response = response;
886892
}
887893

888-
return response;
889-
});
890-
891-
const output: ModelResponse = {
892-
usage: new Usage({
893-
inputTokens: response.usage?.input_tokens ?? 0,
894-
outputTokens: response.usage?.output_tokens ?? 0,
895-
totalTokens: response.usage?.total_tokens ?? 0,
896-
inputTokensDetails: { ...response.usage?.input_tokens_details },
897-
outputTokensDetails: { ...response.usage?.output_tokens_details },
898-
}),
899-
output: convertToOutputItem(response.output),
900-
responseId: response.id,
901-
providerData: response,
902-
};
894+
const output: ModelResponse = {
895+
usage: new Usage({
896+
inputTokens: response.usage?.input_tokens ?? 0,
897+
outputTokens: response.usage?.output_tokens ?? 0,
898+
totalTokens: response.usage?.total_tokens ?? 0,
899+
inputTokensDetails: { ...response.usage?.input_tokens_details },
900+
outputTokensDetails: { ...response.usage?.output_tokens_details },
901+
}),
902+
output: convertToOutputItem(response.output),
903+
responseId: response.id,
904+
providerData: response,
905+
};
903906

904-
return output;
907+
return output;
908+
} catch (error) {
909+
if (span) {
910+
span.setError({
911+
message: 'Error getting response',
912+
data: {
913+
error: request.tracing
914+
? String(error)
915+
: error instanceof Error
916+
? error.name
917+
: undefined,
918+
},
919+
});
920+
}
921+
throw error;
922+
} finally {
923+
if (span) {
924+
span.end();
925+
resetCurrentSpan();
926+
}
927+
}
905928
}
906929

907930
/**

packages/agents-openai/test/openaiResponsesModel.test.ts

Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -74,6 +74,47 @@ describe('OpenAIResponsesModel', () => {
7474
});
7575
});
7676

77+
it('getResponse should not throw tracing exception when tracing is disabled', async () => {
78+
const fakeResponse = {
79+
id: 'res1',
80+
usage: {
81+
input_tokens: 3,
82+
output_tokens: 4,
83+
total_tokens: 7,
84+
},
85+
output: [
86+
{
87+
id: 'test_id',
88+
type: 'message',
89+
status: 'completed',
90+
content: [{ type: 'output_text', text: 'hi' }],
91+
role: 'assistant',
92+
},
93+
],
94+
};
95+
const createMock = vi.fn().mockResolvedValue(fakeResponse);
96+
const fakeClient = {
97+
responses: { create: createMock },
98+
} as unknown as OpenAI;
99+
const model = new OpenAIResponsesModel(fakeClient, 'gpt-test');
100+
101+
const request = {
102+
systemInstructions: 'inst',
103+
input: 'hello',
104+
modelSettings: {},
105+
tools: [],
106+
outputType: 'text',
107+
handoffs: [],
108+
tracing: false,
109+
signal: undefined,
110+
};
111+
112+
await expect(model.getResponse(request as any)).resolves.not.toThrow();
113+
await expect(model.getResponse(request as any)).resolves.not.toThrow(
114+
'No existing trace found',
115+
);
116+
});
117+
77118
it('getStreamedResponse yields events and calls client with stream flag', async () => {
78119
withTrace('test', async () => {
79120
const fakeResponse = { id: 'res2', usage: {}, output: [] };

0 commit comments

Comments (0)