Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions .changeset/grumpy-parks-stand.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
---
'@livekit/agents-plugin-google': patch
'@livekit/agents-plugin-openai': patch
'@livekit/agents-plugins-test': patch
'@livekit/agents': patch
---

bump openai to 6.x
4 changes: 2 additions & 2 deletions agents/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -56,13 +56,13 @@
"heap-js": "^2.6.0",
"json-schema": "^0.4.0",
"livekit-server-sdk": "^2.13.3",
"openai": "^4.91.1",
"openai": "^6.8.1",
"pidusage": "^4.0.1",
"pino": "^8.19.0",
"pino-pretty": "^11.0.0",
"sharp": "0.34.3",
"uuid": "^11.1.0",
"ws": "^8.16.0",
"ws": "^8.18.0",
"zod-to-json-schema": "^3.24.6"
},
"peerDependencies": {
Expand Down
28 changes: 17 additions & 11 deletions agents/src/inference/llm.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import {
APIStatusError,
APITimeoutError,
DEFAULT_API_CONNECT_OPTIONS,
type Expand,
toError,
} from '../index.js';
import * as llm from '../llm/index.js';
Expand Down Expand Up @@ -34,9 +35,10 @@ export type KimiModels = 'moonshotai/kimi-k2-instruct';

export type DeepSeekModels = 'deepseek-ai/deepseek-v3';

type ChatCompletionPredictionContentParam = OpenAI.Chat.Completions.ChatCompletionPredictionContent;
type WebSearchOptions = OpenAI.Chat.Completions.ChatCompletionCreateParams.WebSearchOptions;
type ToolChoice = OpenAI.Chat.Completions.ChatCompletionCreateParams['tool_choice'];
type ChatCompletionPredictionContentParam =
Expand<OpenAI.Chat.Completions.ChatCompletionPredictionContent>;
type WebSearchOptions = Expand<OpenAI.Chat.Completions.ChatCompletionCreateParams.WebSearchOptions>;
type ToolChoice = Expand<OpenAI.Chat.Completions.ChatCompletionCreateParams['tool_choice']>;
type Verbosity = 'low' | 'medium' | 'high';

export interface ChatCompletionOptions extends Record<string, unknown> {
Expand Down Expand Up @@ -180,9 +182,13 @@ export class LLM extends llm.LLM {
modelOptions.parallel_tool_calls = parallelToolCalls;
}

toolChoice = toolChoice !== undefined ? toolChoice : this.opts.modelOptions.tool_choice;
toolChoice =
toolChoice !== undefined
? toolChoice
: (this.opts.modelOptions.tool_choice as llm.ToolChoice | undefined);

if (toolChoice) {
modelOptions.tool_choice = toolChoice;
modelOptions.tool_choice = toolChoice as ToolChoice;
}

// TODO(AJS-270): Add response_format support here
Expand Down Expand Up @@ -238,7 +244,7 @@ export class LLMStream extends llm.LLMStream {
toolCtx?: llm.ToolContext;
gatewayOptions?: GatewayOptions;
connOptions: APIConnectOptions;
modelOptions: Record<string, any>;
modelOptions: Record<string, unknown>;
providerFmt?: llm.ProviderFormat;
},
) {
Expand Down Expand Up @@ -270,7 +276,7 @@ export class LLMStream extends llm.LLMStream {
description: func.description,
parameters: llm.toJsonSchema(
func.parameters,
) as unknown as OpenAI.Chat.Completions.ChatCompletionTool['function']['parameters'],
) as unknown as OpenAI.Chat.Completions.ChatCompletionFunctionTool['function']['parameters'],
},
}))
: undefined;
Expand Down Expand Up @@ -345,7 +351,7 @@ export class LLMStream extends llm.LLMStream {
options: {
statusCode: error.status,
body: error.error,
requestId: error.request_id,
requestId: error.requestID,
retryable,
},
});
Expand Down Expand Up @@ -387,10 +393,10 @@ export class LLMStream extends llm.LLMStream {
*
* Choice(delta=ChoiceDelta(content=None, function_call=None, refusal=None, role='assistant', tool_calls=None), finish_reason=None, index=0, logprobs=None)
* [ChoiceDeltaToolCall(index=0, id='call_LaVeHWUHpef9K1sd5UO8TtLg', function=ChoiceDeltaToolCallFunction(arguments='', name='get_weather'), type='function')]
* [ChoiceDeltaToolCall(index=0, id=None, function=ChoiceDeltaToolCallFunction(arguments='{"location": "P', name=None), type=None)]
* [ChoiceDeltaToolCall(index=0, id=None, function=ChoiceDeltaToolCallFunction(arguments='aris}', name=None), type=None)]
* [ChoiceDeltaToolCall(index=0, id=None, function=ChoiceDeltaToolCallFunction(arguments='\{"location": "P', name=None), type=None)]
* [ChoiceDeltaToolCall(index=0, id=None, function=ChoiceDeltaToolCallFunction(arguments='aris\}', name=None), type=None)]
* [ChoiceDeltaToolCall(index=1, id='call_ThU4OmMdQXnnVmpXGOCknXIB', function=ChoiceDeltaToolCallFunction(arguments='', name='get_weather'), type='function')]
* [ChoiceDeltaToolCall(index=1, id=None, function=ChoiceDeltaToolCallFunction(arguments='{"location": "T', name=None), type=None)]
* [ChoiceDeltaToolCall(index=1, id=None, function=ChoiceDeltaToolCallFunction(arguments='\{"location": "T', name=None), type=None)]
* [ChoiceDeltaToolCall(index=1, id=None, function=ChoiceDeltaToolCallFunction(arguments='okyo', name=None), type=None)]
* Choice(delta=ChoiceDelta(content=None, function_call=None, refusal=None, role=None, tool_calls=None), finish_reason='tool_calls', index=0, logprobs=None)
*/
Expand Down
2 changes: 1 addition & 1 deletion agents/src/llm/llm.ts
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ export abstract class LLM extends (EventEmitter as new () => TypedEmitter<LLMCal
connOptions?: APIConnectOptions;
parallelToolCalls?: boolean;
toolChoice?: ToolChoice;
extraKwargs?: Record<string, any>;
extraKwargs?: Record<string, unknown>;
}): LLMStream;

/**
Expand Down
8 changes: 4 additions & 4 deletions agents/src/llm/provider_format/google.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,11 @@ export interface GoogleFormatData {
export async function toChatCtx(
chatCtx: ChatContext,
injectDummyUserMessage: boolean = true,
): Promise<[Record<string, any>[], GoogleFormatData]> {
const turns: Record<string, any>[] = [];
): Promise<[Record<string, unknown>[], GoogleFormatData]> {
const turns: Record<string, unknown>[] = [];
const systemMessages: string[] = [];
let currentRole: string | null = null;
let parts: Record<string, any>[] = [];
let parts: Record<string, unknown>[] = [];

// Flatten all grouped tool calls to get individual messages
const itemGroups = groupToolCalls(chatCtx);
Expand Down Expand Up @@ -104,7 +104,7 @@ export async function toChatCtx(
];
}

async function toImagePart(image: ImageContent): Promise<Record<string, any>> {
async function toImagePart(image: ImageContent): Promise<Record<string, unknown>> {
const cacheKey = 'serialized_image';
if (!image._cache[cacheKey]) {
image._cache[cacheKey] = await serializeImage(image);
Expand Down
17 changes: 17 additions & 0 deletions agents/src/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,23 @@ import { TransformStream, type TransformStreamDefaultController } from 'node:str
import { v4 as uuidv4 } from 'uuid';
import { log } from './log.js';

/**
 * Recursively expands all nested properties of a type, resolving type
 * aliases so that the real, fully-resolved shape is shown on hover in
 * an IDE instead of the alias name.
 *
 * Functions are passed through untouched; Array, Map, and Set are
 * rebuilt with their element/key/value types expanded; all other
 * object types have every property expanded recursively; primitives
 * pass through unchanged.
 */
// eslint-disable-next-line @typescript-eslint/ban-types
export type Expand<T> = T extends Function
  ? T
  : T extends object
    ? T extends Array<infer U>
      ? Array<Expand<U>>
      : T extends Map<infer K, infer V>
        ? Map<Expand<K>, Expand<V>>
        : T extends Set<infer M>
          ? Set<Expand<M>>
          : { [K in keyof T]: Expand<T[K]> }
    : T;
Comment on lines +23 to +33
Copy link
Contributor Author

@toubatbrian toubatbrian Nov 7, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

For example:

type WebSearchOptions = Expand<OpenAI.Chat.Completions.ChatCompletionCreateParams.WebSearchOptions>;

When hover over WebSearchOptions in Cursor, it shows:

type WebSearchOptions = {
    search_context_size?: "low" | "medium" | "high" | undefined;
    user_location?: {
        approximate: {
            city?: string | undefined;
            country?: string | undefined;
            region?: string | undefined;
            timezone?: string | undefined;
        };
        type: "approximate";
    } | null | undefined;
}

instead of

type WebSearchOptions = OpenAI.Chat.Completions.ChatCompletionCreateParams.WebSearchOptions

This helps with debugging.


/** Union of a single and a list of {@link AudioFrame}s */
export type AudioBuffer = AudioFrame[] | AudioFrame;

Expand Down
2 changes: 1 addition & 1 deletion examples/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@
"@livekit/noise-cancellation-node": "^0.1.9",
"@livekit/rtc-node": "^0.13.11",
"livekit-server-sdk": "^2.13.3",
"ws": "^8.16.0"
"ws": "^8.18.0"
},
"peerDependencies": {
"zod": "^3.25.76 || ^4.1.8"
Expand Down
3 changes: 2 additions & 1 deletion plugins/google/src/llm.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,12 @@ import { llm } from '@livekit/agents-plugins-test';
import { describe } from 'vitest';
import { LLM } from './llm.js';

describe.skip('Google', async () => {
describe('Google', async () => {
await llm(
new LLM({
model: 'gemini-2.5-flash',
temperature: 0,
}),
true,
);
});
4 changes: 2 additions & 2 deletions plugins/openai/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -45,8 +45,8 @@
},
"dependencies": {
"@livekit/mutex": "^1.1.1",
"openai": "^4.91.1",
"ws": "^8.16.0"
"openai": "^6.8.1",
"ws": "^8.18.0"
},
"peerDependencies": {
"@livekit/agents": "workspace:*",
Expand Down
1 change: 1 addition & 0 deletions plugins/openai/src/llm.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,5 +10,6 @@ describe('OpenAI', async () => {
new LLM({
temperature: 0,
}),
false,
);
});
4 changes: 2 additions & 2 deletions plugins/openai/src/llm.ts
Original file line number Diff line number Diff line change
Expand Up @@ -445,9 +445,9 @@ export class LLM extends llm.LLM {
connOptions?: APIConnectOptions;
parallelToolCalls?: boolean;
toolChoice?: llm.ToolChoice;
extraKwargs?: Record<string, any>;
extraKwargs?: Record<string, unknown>;
}): LLMStream {
const extras: Record<string, any> = { ...extraKwargs }; // eslint-disable-line @typescript-eslint/no-explicit-any
const extras: Record<string, unknown> = { ...extraKwargs };

if (this.#opts.metadata) {
extras.metadata = this.#opts.metadata;
Expand Down
11 changes: 6 additions & 5 deletions plugins/test/src/llm.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
// SPDX-License-Identifier: Apache-2.0
import { initializeLogger, llm as llmlib } from '@livekit/agents';
import { describe, expect, it } from 'vitest';
import { z } from 'zod';
import { z } from 'zod/v4';

const toolCtx: llmlib.ToolContext = {
getWeather: llmlib.tool({
Expand Down Expand Up @@ -58,8 +58,9 @@ const toolCtx: llmlib.ToolContext = {
}),
};

export const llm = async (llm: llmlib.LLM) => {
export const llm = async (llm: llmlib.LLM, isGoogle: boolean) => {
initializeLogger({ pretty: false });

describe('LLM', async () => {
it('should properly respond to chat', async () => {
const chatCtx = new llmlib.ChatContext();
Expand Down Expand Up @@ -120,7 +121,7 @@ export const llm = async (llm: llmlib.LLM) => {
expect(calls.length).toStrictEqual(1);
expect(JSON.parse(calls[0]!.args).unit).toStrictEqual('celsius');
});
it('should handle optional arguments', async () => {
it.skipIf(isGoogle)('should handle optional arguments', async () => {
const calls = await requestFncCall(
llm,
'Use a tool call to update the user info to name Theo. Leave email and address blank.',
Expand All @@ -129,8 +130,8 @@ export const llm = async (llm: llmlib.LLM) => {

expect(calls.length).toStrictEqual(1);
expect(JSON.parse(calls[0]!.args).name).toStrictEqual('Theo');
Comment on lines 131 to 132
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Google is still outputting NULL for `.optional()` schema fields; I will file a ticket and investigate.

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I found this issue with .optional() and OpenAI as well openai/openai-node#1180, looks like a known limitation from the official docs: https://platform.openai.com/docs/guides/structured-outputs#all-fields-must-be-required

expect(JSON.parse(calls[0]!.args).email).toBeNull();
expect(JSON.parse(calls[0]!.args).address).toBeNull();
expect(JSON.parse(calls[0]!.args).email).toBeUndefined();
expect(JSON.parse(calls[0]!.args).address).toBeUndefined();
});
});
});
Expand Down
Loading