Merged
packages/langbase/src/langbase/langbase.ts: 79 changes (79 additions, 0 deletions)
@@ -24,6 +24,29 @@ export interface RunOptionsStreamT extends RunOptionsBase {
	stream: true;
}

export interface LlmOptionsBase {
	messages: PromptMessage[];
	model: string;
	llmKey: string;
	top_p?: number;
	max_tokens?: number;
	temperature?: number;
	presence_penalty?: number;
	frequency_penalty?: number;
	stop?: string[];
	tool_choice?: 'auto' | 'required' | ToolChoice;
	parallel_tool_calls?: boolean;
	customModelParams?: Record<string, any>;
}

export interface LlmOptionsT extends LlmOptionsBase {
	stream?: false;
}

export interface LlmOptionsStreamT extends LlmOptionsBase {
	stream: true;
}
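
// Illustrative sketch, not part of this diff: a hypothetical options object
// that satisfies the new LlmOptionsT shape. The model id and env var are
// placeholders; `stream` is omitted, so the non-streaming overload applies.
const exampleLlmOptions: LlmOptionsT = {
	model: 'openai:gpt-4o-mini', // placeholder model identifier
	llmKey: process.env.OPENAI_API_KEY!, // forwarded downstream as the LB-LLM-Key header
	messages: [{role: 'user', content: 'Hello!'}],
	temperature: 0.7,
};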

interface ChoiceGenerate {
	index: number;
	message: Message;
@@ -91,6 +114,27 @@ export interface Message {
	tool_calls?: ToolCall[];
}

// Message with proper content type for Vision support
export interface PromptMessage {
	role: Role;
	content: string | MessageContentType[] | null;
	name?: string;
	tool_call_id?: string;
	tool_calls?: ToolCall[];
}

export interface MessageContentType {
	type: string;
	text?: string;
	image_url?: {
		url: string;
		detail?: string;
	};
	cache_control?: {
		type: 'ephemeral';
	};
}
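
// Illustrative sketch, not part of this diff: a hypothetical PromptMessage
// using the array content form that MessageContentType enables for vision
// input. The image URL is a placeholder.
const exampleVisionMessage: PromptMessage = {
	role: 'user',
	content: [
		{type: 'text', text: 'What is in this image?'},
		{
			type: 'image_url',
			image_url: {url: 'https://example.com/image.png', detail: 'auto'},
		},
	],
};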

export interface ThreadMessage extends Message {
	attachments?: any[];
	metadata?: Record<string, string>;
@@ -506,6 +550,11 @@ export class Langbase {
	public chunk: (options: ChunkOptions) => Promise<ChunkResponse>;
	public parse: (options: ParseOptions) => Promise<ParseResponse>;

	public llm: {
		(options: LlmOptionsStreamT): Promise<RunResponseStream>;
		(options: LlmOptionsT): Promise<RunResponse>;
	};

	constructor(options?: LangbaseOptions) {
		this.baseUrl = options?.baseUrl ?? 'https://api.langbase.com';
		this.apiKey = options?.apiKey ?? '';
@@ -584,6 +633,8 @@ export class Langbase {
				list: this.listThreadMessages.bind(this),
			},
		};

		this.llm = this.runLlm.bind(this);
	}

	private async runPipe(
@@ -1004,4 +1055,32 @@ export class Langbase {
			endpoint: `/v1/threads/${options.threadId}/messages`,
		});
	}

	// Add the private implementation
	private async runLlm(
		options: LlmOptionsStreamT,
	): Promise<RunResponseStream>;

	private async runLlm(options: LlmOptionsT): Promise<RunResponse>;

	private async runLlm(
		options: LlmOptionsT | LlmOptionsStreamT,
	): Promise<RunResponse | RunResponseStream> {
		if (!options.llmKey) {
			throw new Error('LLM API key is required to run this LLM.');
		}

		// Remove stream property if it's not set to true
		if (typeof options.stream === 'undefined') {
			delete options.stream;
		}

		return this.request.post({
			endpoint: '/v1/llm',
			body: options,
			headers: {
				...(options.llmKey && {'LB-LLM-Key': options.llmKey}),
			},
		});
	}
}
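
A rough usage sketch of the new surface, assuming the package is imported as langbase; the model identifier, environment variables, and prompt text are placeholders rather than anything taken from this PR:

import {Langbase} from 'langbase';

const langbase = new Langbase({apiKey: process.env.LANGBASE_API_KEY!});

// Non-streaming call: the LlmOptionsT overload resolves to a RunResponse.
const completion = await langbase.llm({
	model: 'openai:gpt-4o-mini', // placeholder model identifier
	llmKey: process.env.OPENAI_API_KEY!, // sent to the API as the LB-LLM-Key header
	messages: [{role: 'user', content: 'Summarize this change in one sentence.'}],
});

// Streaming call: `stream: true` selects the RunResponseStream overload.
const streamed = await langbase.llm({
	model: 'openai:gpt-4o-mini',
	llmKey: process.env.OPENAI_API_KEY!,
	messages: [{role: 'user', content: 'Hello!'}],
	stream: true,
});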