@@ -24,6 +24,29 @@ export interface RunOptionsStreamT extends RunOptionsBase {
 	stream: true;
 }
 
+export interface LlmOptionsBase {
+	messages: PromptMessage[];
+	model: string;
+	llmKey: string;
+	top_p?: number;
+	max_tokens?: number;
+	temperature?: number;
+	presence_penalty?: number;
+	frequency_penalty?: number;
+	stop?: string[];
+	tool_choice?: 'auto' | 'required' | ToolChoice;
+	parallel_tool_calls?: boolean;
+	customModelParams?: Record<string, any>;
+}
+
+export interface LlmOptionsT extends LlmOptionsBase {
+	stream?: false;
+}
+
+export interface LlmOptionsStreamT extends LlmOptionsBase {
+	stream: true;
+}
+
 interface ChoiceGenerate {
 	index: number;
 	message: Message;
@@ -91,6 +114,27 @@ export interface Message {
 	tool_calls?: ToolCall[];
 }
 
+// Message with proper content type for Vision support
+export interface PromptMessage {
+	role: Role;
+	content: string | MessageContentType[] | null;
+	name?: string;
+	tool_call_id?: string;
+	tool_calls?: ToolCall[];
+}
+
+export interface MessageContentType {
+	type: string;
+	text?: string;
+	image_url?: {
+		url: string;
+		detail?: string;
+	};
+	cache_control?: {
+		type: 'ephemeral';
+	};
+}
+
 export interface ThreadMessage extends Message {
 	attachments?: any[];
 	metadata?: Record<string, string>;
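For reference, a `PromptMessage` exercising the new `MessageContentType` union could be built along these lines. This is a minimal sketch: the import path, role value, image URL, and detail setting are illustrative assumptions, not values taken from this commit.

```ts
// Assumed re-export; adjust the import to wherever these types are exposed.
import type {PromptMessage} from 'langbase';

// A vision-style message mixing a text part with an image part.
const visionMessage: PromptMessage = {
	role: 'user',
	content: [
		{type: 'text', text: 'What is shown in this image?'},
		{
			type: 'image_url',
			image_url: {
				url: 'https://example.com/photo.png', // placeholder URL
				detail: 'auto', // placeholder detail level
			},
		},
	],
};
```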
@@ -506,6 +550,11 @@ export class Langbase {
 	public chunk: (options: ChunkOptions) => Promise<ChunkResponse>;
 	public parse: (options: ParseOptions) => Promise<ParseResponse>;
 
+	public llm: {
+		(options: LlmOptionsStreamT): Promise<RunResponseStream>;
+		(options: LlmOptionsT): Promise<RunResponse>;
+	};
+
 	constructor(options?: LangbaseOptions) {
 		this.baseUrl = options?.baseUrl ?? 'https://api.langbase.com';
 		this.apiKey = options?.apiKey ?? '';
@@ -584,6 +633,8 @@ export class Langbase {
 				list: this.listThreadMessages.bind(this),
 			},
 		};
+
+		this.llm = this.runLlm.bind(this);
 	}
 
 	private async runPipe(
@@ -1004,4 +1055,32 @@ export class Langbase {
 			endpoint: `/v1/threads/${options.threadId}/messages`,
 		});
 	}
+
+	// Private implementation behind the public `llm` overloads
+	private async runLlm(
+		options: LlmOptionsStreamT,
+	): Promise<RunResponseStream>;
+
+	private async runLlm(options: LlmOptionsT): Promise<RunResponse>;
+
+	private async runLlm(
+		options: LlmOptionsT | LlmOptionsStreamT,
+	): Promise<RunResponse | RunResponseStream> {
+		if (!options.llmKey) {
+			throw new Error('LLM API key is required to run this LLM.');
+		}
+
+		// Drop the stream property when the caller did not set it explicitly
+		if (typeof options.stream === 'undefined') {
+			delete options.stream;
+		}
+
+		return this.request.post({
+			endpoint: '/v1/llm',
+			body: options,
+			headers: {
+				...(options.llmKey && {'LB-LLM-Key': options.llmKey}),
+			},
+		});
+	}
 }
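Putting the pieces together, the new `llm` surface could be called roughly as follows. This is a hedged sketch only: the model identifier, environment variable names, and the exact shape of `RunResponse`/`RunResponseStream` are assumptions rather than details confirmed by this commit; only the option names and the `LB-LLM-Key` forwarding come from the diff above.

```ts
import {Langbase} from 'langbase';

const langbase = new Langbase({apiKey: process.env.LANGBASE_API_KEY!});

// Non-streaming overload: resolves to a RunResponse.
const response = await langbase.llm({
	model: 'openai:gpt-4o-mini', // placeholder model identifier
	llmKey: process.env.OPENAI_API_KEY!, // forwarded via the LB-LLM-Key header
	messages: [{role: 'user', content: 'Summarize this repo in one line.'}],
	temperature: 0.2,
});

// Streaming overload: `stream: true` selects the RunResponseStream signature.
const streamed = await langbase.llm({
	model: 'openai:gpt-4o-mini',
	llmKey: process.env.OPENAI_API_KEY!,
	messages: [{role: 'user', content: 'Write a haiku about TypeScript.'}],
	stream: true,
});
```

The two call sites resolve to different return types purely through the overloaded `llm` signature declared on the class, which is why `stream` is typed as a literal `true`/`false` rather than `boolean`.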