@@ -34,16 +34,20 @@ export interface LlmOptionsBase {
 	presence_penalty?: number;
 	frequency_penalty?: number;
 	stop?: string[];
+	tools?: Tools[];
 	tool_choice?: 'auto' | 'required' | ToolChoice;
 	parallel_tool_calls?: boolean;
+	reasoning_effort?: string | null;
+	max_completion_tokens?: number;
+	response_format?: ResponseFormat;
 	customModelParams?: Record<string, any>;
 }
 
-export interface LlmOptionsT extends LlmOptionsBase {
+export interface LlmOptions extends LlmOptionsBase {
 	stream?: false;
 }
 
-export interface LlmOptionsStreamT extends LlmOptionsBase {
+export interface LlmOptionsStream extends LlmOptionsBase {
 	stream: true;
 }
 
@@ -135,6 +139,19 @@ export interface MessageContentType {
 	};
 }
 
+export type ResponseFormat =
+	| {type: 'text'}
+	| {type: 'json_object'}
+	| {
+			type: 'json_schema';
+			json_schema: {
+				description?: string;
+				name: string;
+				schema?: Record<string, unknown>;
+				strict?: boolean | null;
+			};
+	  };
+
 export interface ThreadMessage extends Message {
 	attachments?: any[];
 	metadata?: Record<string, string>;
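
The new `ResponseFormat` union mirrors an OpenAI-style `response_format` parameter and is referenced by the `response_format` option added to `LlmOptionsBase` above. As an illustration only (not part of this commit), a value hitting the `json_schema` branch could look like the sketch below; the schema name and fields are invented, and whether `ResponseFormat` is re-exported from the package root is an assumption.

```ts
import type {ResponseFormat} from 'langbase'; // assumes the type is re-exported

// Hypothetical structured-output format using the `json_schema` branch
// of the ResponseFormat union added in this commit.
const weatherFormat: ResponseFormat = {
	type: 'json_schema',
	json_schema: {
		name: 'weather_report', // invented example name
		description: 'Structured weather summary',
		schema: {
			type: 'object',
			properties: {
				city: {type: 'string'},
				temperature_c: {type: 'number'},
			},
			required: ['city', 'temperature_c'],
			additionalProperties: false,
		},
		strict: true,
	},
};
```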
@@ -551,8 +568,10 @@ export class Langbase {
 	public parse: (options: ParseOptions) => Promise<ParseResponse>;
 
 	public llm: {
-		(options: LlmOptionsStreamT): Promise<RunResponseStream>;
-		(options: LlmOptionsT): Promise<RunResponse>;
+		run: {
+			(options: LlmOptionsStream): Promise<RunResponseStream>;
+			(options: LlmOptions): Promise<RunResponse>;
+		};
 	};
 
 	constructor(options?: LangbaseOptions) {
@@ -634,7 +653,9 @@ export class Langbase {
 			},
 		};
 
-		this.llm = this.runLlm.bind(this);
+		this.llm = {
+			run: this.runLlm.bind(this),
+		};
 	}
 
 	private async runPipe(
@@ -1057,14 +1078,12 @@ export class Langbase {
 	}
 
 	// Add the private implementation
-	private async runLlm(
-		options: LlmOptionsStreamT,
-	): Promise<RunResponseStream>;
+	private async runLlm(options: LlmOptionsStream): Promise<RunResponseStream>;
 
-	private async runLlm(options: LlmOptionsT): Promise<RunResponse>;
+	private async runLlm(options: LlmOptions): Promise<RunResponse>;
 
 	private async runLlm(
-		options: LlmOptionsT | LlmOptionsStreamT,
+		options: LlmOptions | LlmOptionsStream,
 	): Promise<RunResponse | RunResponseStream> {
 		if (!options.llmKey) {
 			throw new Error('LLM API key is required to run this LLM.');
@@ -1076,7 +1095,7 @@ export class Langbase {
 		}
 
 		return this.request.post({
-			endpoint: '/v1/llm',
+			endpoint: '/v1/llm/run',
 			body: options,
 			headers: {
 				...(options.llmKey && {'LB-LLM-Key': options.llmKey}),
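
Taken together, these hunks rename `LlmOptionsT`/`LlmOptionsStreamT` to `LlmOptions`/`LlmOptionsStream`, move the public entry point behind `langbase.llm.run()`, and repoint the request at `/v1/llm/run`. A minimal usage sketch under those assumptions follows; the `apiKey` constructor option, the `model` and `messages` fields, and the model identifier are not visible in this diff and are assumed for illustration.

```ts
import {Langbase} from 'langbase';

// Assumes LangbaseOptions accepts an apiKey (not shown in this diff).
const langbase = new Langbase({apiKey: process.env.LANGBASE_API_KEY!});

async function main() {
	// Omitting `stream` selects the LlmOptions overload, so this resolves
	// to RunResponse rather than RunResponseStream.
	const response = await langbase.llm.run({
		llmKey: process.env.OPENAI_API_KEY!, // provider key, forwarded as LB-LLM-Key
		model: 'openai:gpt-4o-mini', // placeholder model identifier
		messages: [{role: 'user', content: 'Reply with a one-word greeting.'}],
		max_completion_tokens: 64,
		response_format: {type: 'text'},
	});
	console.log(response);
}

main().catch(console.error);
```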