Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -418,6 +418,70 @@ describe("OpenAI Chat -> Responses converters", () => {
expect(toolResponse.tool_call_id).toBe(shortId);
});

it("prefers call_id over id when mapping function_call items", () => {
  // Opaque Responses item id — must NOT be used as the tool-call linkage.
  const opaqueItemId =
    "fc_1234567890abcdefghij1234567890abcdefghij1234567890";

  const request = {
    model: "gpt-4o-mini",
    input: [
      { role: "user", content: "Hello" },
      {
        type: "function_call",
        id: opaqueItemId,
        call_id: "call_link_1",
        name: "my_func",
        arguments: "{}",
      },
      { type: "function_call_output", call_id: "call_link_1", output: "result" },
    ],
  } as unknown as ResponsesRequestBody;

  const converted = toChatCompletions(request);

  const assistantMessage = converted.messages?.[1] as any;
  const toolMessage = converted.messages?.[2] as any;

  // Both sides of the tool-call link must carry call_id so they stay paired.
  expect(assistantMessage.tool_calls?.[0].id).toBe("call_link_1");
  expect(toolMessage.tool_call_id).toBe("call_link_1");
});

it("maps responses text.format json_schema to chat response_format", () => {
  const request = {
    model: "gpt-4o-mini",
    input: "hello",
    text: {
      format: {
        type: "json_schema",
        name: "weather_schema",
        description: "Structured weather response",
        schema: {
          type: "object",
          properties: {
            temperature: { type: "number" },
          },
          required: ["temperature"],
        },
        strict: true,
      },
    },
  } as unknown as ResponsesRequestBody;

  const converted = toChatCompletions(request);

  // The flattened Responses format must be re-nested under `json_schema`
  // in the Chat Completions response_format shape.
  const expectedResponseFormat = {
    type: "json_schema",
    json_schema: {
      name: "weather_schema",
      description: "Structured weather response",
      schema: {
        type: "object",
        properties: {
          temperature: { type: "number" },
        },
        required: ["temperature"],
      },
      strict: true,
    },
  };

  expect(converted.response_format).toEqual(expectedResponseFormat);
});

it("maps Responses tools (flattened) to Chat tools (nested)", () => {
const req = {
model: "gpt-4o-mini",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,37 @@ function convertContentParts(
});
}

/**
 * Translates a Responses-style `text.format` value into the Chat Completions
 * `response_format` shape.
 *
 * The Responses API flattens json_schema fields (`name`, `schema`, `strict`,
 * `description`) directly onto the format object, while Chat Completions nests
 * them under a `json_schema` key. Any other format variant is returned as-is.
 *
 * @param textFormat - The `text.format` value from a Responses request body.
 * @returns The equivalent Chat Completions `response_format`, or `undefined`
 *          when no format was supplied.
 */
function mapTextFormatToChatResponseFormat(
  textFormat: NonNullable<ResponsesRequestBody["text"]>["format"]
): HeliconeChatCreateParams["response_format"] {
  if (!textFormat) {
    return undefined;
  }

  const isFlattenedJsonSchema =
    textFormat.type === "json_schema" && "schema" in textFormat;

  if (!isFlattenedJsonSchema) {
    // Non-json_schema formats (e.g. text / json_object) share the same shape
    // in both APIs, so they pass through unchanged.
    return textFormat as HeliconeChatCreateParams["response_format"];
  }

  const nested = {
    type: "json_schema",
    json_schema: {
      name: textFormat.name,
      description: textFormat.description,
      schema: textFormat.schema,
      strict: textFormat.strict,
    },
  };
  return nested as HeliconeChatCreateParams["response_format"];
}

/**
 * Picks the identifier that links a `function_call` item to its matching
 * `function_call_output`.
 *
 * `call_id` is the canonical linkage field used by function_call_output, so it
 * is preferred over the opaque item `id` to avoid mismatches in multi-turn
 * tool chains. Empty strings are treated as absent.
 *
 * @param item - A flattened Responses `function_call` input item.
 * @param fallbackId - Synthetic id used when neither field carries a value.
 * @returns The id to use on the generated Chat Completions tool call.
 */
function resolveFunctionCallLinkId(
  item: Extract<ResponsesInputItem, { type: "function_call" }>,
  fallbackId: string
): string {
  if (item.call_id) {
    return item.call_id;
  }
  if (item.id) {
    return item.id;
  }
  return fallbackId;
}

/**
* Collects consecutive items of a specific type from the input array.
* Returns the collected items and the index after the last collected item.
Expand Down Expand Up @@ -115,7 +146,7 @@ function convertInputToMessages(input: ResponsesRequestBody["input"]) {
>(input, i, "function_call");

const toolCalls = functionCalls.map((fc, idx) => ({
id: truncateToolCallId(fc.id || fc.call_id || `call_${i + idx}`),
id: truncateToolCallId(resolveFunctionCallLinkId(fc, `call_${i + idx}`)),
type: "function" as const,
function: {
name: fc.name,
Expand Down Expand Up @@ -267,6 +298,10 @@ export function toChatCompletions(
reasoning_effort = body.reasoning.effort === "minimal" ? "low" : body.reasoning.effort;
}

const responseFormat =
body.response_format ??
mapTextFormatToChatResponseFormat(body.text?.format);

const heliconeBody: HeliconeChatCreateParams = {
model: body.model,
messages,
Expand All @@ -285,7 +320,7 @@ export function toChatCompletions(
logit_bias: body.logit_bias,
logprobs: body.logprobs,
top_logprobs: body.top_logprobs,
response_format: body.response_format,
response_format: responseFormat,
seed: body.seed,
user: body.user,
service_tier: body.service_tier,
Expand Down
Loading