Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
87 changes: 75 additions & 12 deletions src/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -204,6 +204,70 @@ const buildIgnoredWarnings = (record: Record<string, unknown>, usedKeys: Readonl
return Array.from(warnings);
};

/** Request-body keys forwarded from the raw client payload onto the codex request (tool-related keys are normalized first). */
type PassthroughToolSchemaKey = "tools" | "tool_choice" | "parallel_tool_calls" | "prompt_cache_key" | "text" | "include";

/**
 * Flattens an OpenAI-style `tool_choice` of shape
 * `{ type: "function", function: { name } }` into the flat
 * `{ type: "function", name }` shape, dropping the nested `function` record.
 * Values that are not records, are not function-typed, or carry no resolvable
 * name are returned unchanged. Nested extras (e.g. `strict`) are NOT hoisted.
 */
const normalizeCodexToolChoice = (value: unknown): unknown => {
  if (!isRecord(value)) return value;
  if (getString(value.type) !== "function") return value;

  const flattened: Record<string, unknown> = { ...value };
  const nested = isRecord(value.function) ? value.function : null;
  const existingName = getString(flattened.name);
  // Nothing to normalize: no nested record and no top-level name.
  if (!nested && !existingName) return value;

  if (!existingName) {
    const nestedName = getString(nested?.name);
    if (!nestedName) return value;
    flattened.name = nestedName;
  }

  delete flattened.function;
  return flattened;
};

/**
 * Rewrites OpenAI-style function tools (`{ type: "function", function: {...} }`)
 * into the flat codex shape by hoisting every nested `function` field to the
 * top level. Entries that are not records, not function-typed, lack a nested
 * `function` record, or have no name anywhere are passed through untouched.
 * Existing top-level fields take precedence over nested ones.
 */
const normalizeCodexTools = (value: unknown): unknown => {
  if (!Array.isArray(value)) return value;

  return value.map((entry) => {
    if (!isRecord(entry)) return entry;
    if (getString(entry.type) !== "function") return entry;

    const nested = isRecord(entry.function) ? entry.function : null;
    if (!nested) return entry;

    const flattened: Record<string, unknown> = { ...entry };
    const hasTopLevelName = Boolean(getString(flattened.name));
    const nestedName = getString(nested.name);
    // A tool with no name in either place cannot be flattened meaningfully.
    if (!hasTopLevelName && !nestedName) return entry;

    if (!hasTopLevelName) flattened.name = nestedName;

    // Hoist remaining nested fields without overwriting top-level ones.
    for (const [field, fieldValue] of Object.entries(nested)) {
      if (!(field in flattened)) flattened[field] = fieldValue;
    }

    delete flattened.function;
    return flattened;
  });
};

/** Applies key-specific codex normalization; non-tool keys pass through verbatim. */
const normalizePassthroughForCodex = (key: PassthroughToolSchemaKey, value: unknown): unknown => {
  switch (key) {
    case "tools":
      return normalizeCodexTools(value);
    case "tool_choice":
      return normalizeCodexToolChoice(value);
    default:
      return value;
  }
};

/**
 * Copies each listed passthrough key that the raw client payload actually
 * carries onto the codex request body, normalizing tool-related values on the
 * way. Keys absent from the payload are left off the codex body entirely.
 */
const applyPassthroughToCodexRequest = (
  codexBody: Record<string, unknown>,
  rawRecord: Record<string, unknown>,
  keys: readonly PassthroughToolSchemaKey[],
): void => {
  // Own-property check (not `in`) so inherited/prototype keys are never copied.
  const hasOwn = Object.prototype.hasOwnProperty;
  for (const key of keys) {
    if (!hasOwn.call(rawRecord, key)) continue;
    codexBody[key] = normalizePassthroughForCodex(key, rawRecord[key]);
  }
};

const withUosWarning = (response: Response, warnings: string[]): Response => {
if (!warnings.length) return response;
const headers = new Headers(response.headers);
Expand Down Expand Up @@ -2375,12 +2439,8 @@ export const handleChatCompletions = async (req: Request, usageContext?: UsageCo
reasoning: reasoningValue,
instructions,
});
const passthroughKeys = ["tools", "tool_choice", "parallel_tool_calls", "prompt_cache_key"];
for (const key of passthroughKeys) {
if (Object.prototype.hasOwnProperty.call(rawRecord, key)) {
codexBody[key] = rawRecord[key];
}
}
const passthroughKeys: PassthroughToolSchemaKey[] = ["tools", "tool_choice", "parallel_tool_calls", "prompt_cache_key"];
applyPassthroughToCodexRequest(codexBody, rawRecord, passthroughKeys);
codexBody.store = false;

const stream = Boolean(body.stream);
Expand Down Expand Up @@ -2550,12 +2610,15 @@ export const handleResponses = async (req: Request, usageContext?: UsageContext)
}

const codexBody = await buildCodexRequest(model, input, { reasoning: reasoningValue, instructions });
const passthroughKeys = ["tools", "tool_choice", "parallel_tool_calls", "prompt_cache_key", "text", "include"];
for (const key of passthroughKeys) {
if (Object.prototype.hasOwnProperty.call(rawRecord, key)) {
codexBody[key] = rawRecord[key];
}
}
const passthroughKeys: PassthroughToolSchemaKey[] = [
"tools",
"tool_choice",
"parallel_tool_calls",
"prompt_cache_key",
"text",
"include",
];
applyPassthroughToCodexRequest(codexBody, rawRecord, passthroughKeys);
codexBody.model = model;
codexBody.input = input;
codexBody.stream = true;
Expand Down
110 changes: 110 additions & 0 deletions tests/openai-compat.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -219,6 +219,116 @@ Deno.test("openai: defaults + ignore temperature", async (t) => {
});
});

// Verifies that OpenAI-style nested `function` tool payloads are flattened into
// the top-level codex shape before the upstream request is issued, for both the
// chat-completions and responses endpoints.
Deno.test("openai: normalize function-style tools for codex compatibility", async (t) => {
  await t.step("chat completions flattens tools and tool_choice", async () => {
    // Captures the JSON body the handler sends upstream so the normalized
    // shape can be inspected after the call.
    let recordedBody: Record<string, unknown> | null = null;

    const response = await withFetchMock(
      (_url, bodyText) => {
        recordedBody = bodyText ? JSON.parse(bodyText) as Record<string, unknown> : null;
        return sseResponse(baseSseChunks());
      },
      () =>
        handleChatCompletions(
          new Request("https://ai.ubq.fi/v1/chat/completions", {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({
              messages: [{ role: "user", content: "get weather" }],
              tools: [
                // Nested OpenAI shape: name/description/parameters live under `function`.
                {
                  type: "function",
                  function: {
                    name: "fetch_weather",
                    description: "Fetch weather for a city.",
                    parameters: { type: "object", properties: { city: { type: "string" } } },
                  },
                },
                // Already-flat tool with a stray nested `function` record: the
                // top-level name must win and nested extras (strict) get hoisted.
                {
                  type: "function",
                  name: "legacy_tool",
                  description: "Already top-level tool name.",
                  parameters: { type: "object", properties: {} },
                  function: { strict: true },
                },
              ],
              // For tool_choice, the top-level name takes precedence and nested
              // extras must be dropped, not merged.
              tool_choice: {
                type: "function",
                name: "forced_choice",
                function: { name: "fetch_weather", strict: true },
              },
            }),
          }),
        ),
    );

    assert.equal(response.status, 200);
    assert.ok(recordedBody);
    const recorded = recordedBody as Record<string, unknown>;
    const recordedTools = recorded["tools"] as Array<Record<string, unknown>> | undefined;
    assert.ok(Array.isArray(recordedTools));
    assert.equal(recordedTools.length, 2);
    assert.equal(recordedTools[0]?.name, "fetch_weather");
    assert.equal(recordedTools[1]?.name, "legacy_tool");
    assert.equal(recordedTools[0]?.description, "Fetch weather for a city.");
    assert.deepEqual(recordedTools[0]?.parameters, {
      type: "object",
      properties: { city: { type: "string" } },
    });
    // Nested `strict` was hoisted onto the legacy tool.
    assert.equal(recordedTools[1]?.strict, true);
    // The nested `function` wrapper must be stripped from every normalized tool.
    assert.equal(Object.prototype.hasOwnProperty.call(recordedTools[0], "function"), false);
    assert.equal(Object.prototype.hasOwnProperty.call(recordedTools[1], "function"), false);
    const recordedToolChoice = recorded["tool_choice"] as Record<string, unknown> | undefined;
    assert.ok(recordedToolChoice);
    assert.equal(recordedToolChoice.type, "function");
    assert.equal(recordedToolChoice["name"], "forced_choice");
    // tool_choice keeps only type + name: nested fields are discarded.
    assert.equal(Object.prototype.hasOwnProperty.call(recordedToolChoice, "strict"), false);
    assert.equal(Object.prototype.hasOwnProperty.call(recordedToolChoice, "function"), false);
  });

  await t.step("responses flattens tools", async () => {
    let recordedBody: Record<string, unknown> | null = null;

    const response = await withFetchMock(
      (_url, bodyText) => {
        recordedBody = bodyText ? JSON.parse(bodyText) as Record<string, unknown> : null;
        return sseResponse(baseSseChunks());
      },
      () =>
        handleResponses(
          new Request("https://ai.ubq.fi/v1/responses", {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({
              input: "get weather",
              tools: [
                {
                  type: "function",
                  function: {
                    name: "fetch_weather",
                    description: "Fetch weather for a city.",
                    parameters: { type: "object", properties: { city: { type: "string" } } },
                  },
                },
              ],
            }),
          }),
        ),
    );

    assert.equal(response.status, 200);
    assert.ok(recordedBody);
    const recorded = recordedBody as Record<string, unknown>;
    const recordedTools = recorded["tools"] as Array<Record<string, unknown>> | undefined;
    assert.ok(Array.isArray(recordedTools));
    assert.equal(recordedTools.length, 1);
    // Nested function fields surfaced at the top level, wrapper removed.
    assert.equal(recordedTools[0]?.name, "fetch_weather");
    assert.equal(recordedTools[0]?.description, "Fetch weather for a city.");
    assert.deepEqual(recordedTools[0]?.parameters, { type: "object", properties: { city: { type: "string" } } });
    assert.equal(Object.prototype.hasOwnProperty.call(recordedTools[0], "function"), false);
  });
});

Deno.test("openai: chat completions accept system-only messages", async () => {
let recordedBody: Record<string, unknown> | null = null;

Expand Down
Loading