diff --git a/.changeset/gold-vans-complain.md b/.changeset/gold-vans-complain.md
new file mode 100644
index 00000000..e0a90beb
--- /dev/null
+++ b/.changeset/gold-vans-complain.md
@@ -0,0 +1,5 @@
+---
+"@openai/agents-openai": patch
+---
+
+fix: resolve #425 duplicate item error when using conversationId with tools
diff --git a/packages/agents-openai/src/openaiResponsesModel.ts b/packages/agents-openai/src/openaiResponsesModel.ts
index a8a3a001..16652d2e 100644
--- a/packages/agents-openai/src/openaiResponsesModel.ts
+++ b/packages/agents-openai/src/openaiResponsesModel.ts
@@ -424,8 +424,51 @@ function getPrompt(prompt: ModelRequest['prompt']):
   };
 }
 
+type InputArray = Exclude<ModelRequest['input'], string>;
+
+const RESPONSE_ITEM_ID_PREFIXES = ['rs_', 'resp_', 'res_', 'msg_'] as const;
+
+function hasStoredConversationMetadata(item: InputArray[number]): boolean {
+  if (!item || typeof item !== 'object') {
+    return false;
+  }
+
+  const providerData = (item as { providerData?: Record<string, unknown> })
+    .providerData;
+  if (providerData && typeof providerData === 'object') {
+    const responseId =
+      (providerData['response_id'] as string | undefined) ??
+      (providerData['responseId'] as string | undefined);
+    if (typeof responseId === 'string' && responseId.length > 0) {
+      return true;
+    }
+  }
+
+  const id = (item as { id?: unknown }).id;
+  if (typeof id === 'string') {
+    for (const prefix of RESPONSE_ITEM_ID_PREFIXES) {
+      if (id.startsWith(prefix)) {
+        return true;
+      }
+    }
+  }
+
+  return false;
+}
+
+function getLastStoredIndex(items: InputArray): number {
+  for (let i = items.length - 1; i >= 0; i--) {
+    if (hasStoredConversationMetadata(items[i])) {
+      return i;
+    }
+  }
+
+  return -1;
+}
+
 function getInputItems(
   input: ModelRequest['input'],
+  conversationId?: string,
 ): OpenAI.Responses.ResponseInputItem[] {
   if (typeof input === 'string') {
     return [
@@ -436,7 +479,15 @@ function getInputItems(
     ];
   }
 
-  return input.map((item) => {
+  let filteredInput: InputArray = input;
+  if (conversationId) {
+    const lastStoredIndex = getLastStoredIndex(input);
+    if (lastStoredIndex >= 0) {
+      filteredInput = input.slice(lastStoredIndex + 1);
+    }
+  }
+
+  return filteredInput.map((item) => {
     if (isMessageItem(item)) {
       return getMessageItem(item);
     }
@@ -847,7 +898,7 @@ export class OpenAIResponsesModel implements Model {
   ): Promise<
     Stream<OpenAI.Responses.ResponseStreamEvent> | OpenAI.Responses.Response
   > {
-    const input = getInputItems(request.input);
+    const input = getInputItems(request.input, request.conversationId);
     const { tools, include } = getTools(request.tools, request.handoffs);
     const toolChoice = getToolChoice(request.modelSettings.toolChoice);
     const { text, ...restOfProviderData } =
diff --git a/packages/agents-openai/test/openaiResponsesModel.helpers.test.ts b/packages/agents-openai/test/openaiResponsesModel.helpers.test.ts
index 012eaafd..ea9d703d 100644
--- a/packages/agents-openai/test/openaiResponsesModel.helpers.test.ts
+++ b/packages/agents-openai/test/openaiResponsesModel.helpers.test.ts
@@ -279,6 +279,49 @@ describe('getInputItems', () => {
       ] as any),
     ).toThrow(UserError);
   });
+
+  it('excludes stored items when conversationId is provided', () => {
+    const items = getInputItems(
+      [
+        { role: 'user', content: 'hi' },
+        {
+          type: 'function_call',
+          id: 'rs_123',
+          name: 'tool',
+          callId: 'call_1',
+          arguments: '{}',
+          status: 'in_progress',
+          providerData: { response_id: 'resp_1' },
+        },
+        {
+          type: 'function_call_result',
+          callId: 'call_1',
+          status: 'completed',
+          output: { type: 'text', text: 'done' },
+        },
+      ] as any,
+      'conv_123',
+    );
+
+    expect(items).toHaveLength(1);
+    expect(items[0]).toMatchObject({
+      type: 'function_call_output',
+      output: 'done',
+    });
+  });
+
+  it('retains new items when no stored metadata exists', () => {
+    const items = getInputItems(
+      [{ role: 'user', content: 'hello' }] as any,
+      'conv_123',
+    );
+
+    expect(items).toHaveLength(1);
+    expect(items[0]).toMatchObject({
+      role: 'user',
+      content: 'hello',
+    });
+  });
 });
 
 describe('convertToOutputItem', () => {
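
For reference, a minimal standalone sketch of the heuristic this patch applies. The names and item shape below are illustrative only, not exports of @openai/agents-openai: when a conversationId is present, items that already carry a stored-response marker (a providerData.response_id or an id with a server-assigned prefix) are assumed to be held by the conversation server-side, and only the items after the last such marker are re-sent.

// Illustrative sketch only; hypothetical names, not the package's real API.
type SketchItem = {
  id?: string;
  providerData?: Record<string, unknown>;
  [key: string]: unknown;
};

// Prefixes treated as server-assigned item ids in the patch above.
const STORED_ID_PREFIXES = ['rs_', 'resp_', 'res_', 'msg_'];

// An item counts as "already stored" if it carries a response_id in its
// provider data or an id with a server-assigned prefix.
function looksStored(item: SketchItem): boolean {
  const responseId = item.providerData?.['response_id'];
  if (typeof responseId === 'string' && responseId.length > 0) {
    return true;
  }
  const id = item.id;
  return typeof id === 'string' && STORED_ID_PREFIXES.some((p) => id.startsWith(p));
}

// With a conversationId, drop everything up to and including the last stored
// item so the server is not sent duplicates of items it already holds.
function itemsToSend(items: SketchItem[], conversationId?: string): SketchItem[] {
  if (!conversationId) {
    return items; // no conversation: send the full local history as before
  }
  const lastStored = items.map(looksStored).lastIndexOf(true);
  return lastStored >= 0 ? items.slice(lastStored + 1) : items;
}

// Example: only the locally produced tool result is re-sent.
console.log(
  itemsToSend(
    [
      { role: 'user', content: 'hi' },
      { type: 'function_call', id: 'rs_123', providerData: { response_id: 'resp_1' } },
      { type: 'function_call_result', callId: 'call_1', output: 'done' },
    ],
    'conv_123',
  ),
);
// -> [ { type: 'function_call_result', callId: 'call_1', output: 'done' } ]

The new tests exercise exactly this behavior: the stored function_call is filtered out, while the locally produced function_call_result (and any input without stored metadata) is still sent.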