@@ -13,7 +13,7 @@ export interface RunOptionsBase {
1313 name ?: string ; // Pipe name for SDK,
1414 apiKey ?: string ; // pipe level key for SDK
1515 llmKey ?: string ; // LLM API key
16- json ?: boolean
16+ json ?: boolean ;
1717 memory ?: RuntimeMemory ;
1818}
1919
@@ -25,8 +25,9 @@ export interface RunOptionsStreamT extends RunOptionsBase {
2525 stream : true ;
2626}
2727
28- export interface LlmOptionsBase {
29- messages : PromptMessage [ ] ;
28+ export interface AgentRunOptionsBase {
29+ input : string | PromptMessage [ ] ;
30+ instructions ?: string | null ;
3031 model : string ;
3132 apiKey : string ;
3233 top_p ?: number ;
@@ -44,11 +45,11 @@ export interface LlmOptionsBase {
4445 customModelParams ?: Record < string , any > ;
4546}
4647
47- export interface LlmOptions extends LlmOptionsBase {
48+ export interface AgentRunOptions extends AgentRunOptionsBase {
4849 stream ?: false ;
4950}
5051
51- export interface LlmOptionsStream extends LlmOptionsBase {
52+ export interface AgentRunOptionsStream extends AgentRunOptionsBase {
5253 stream : true ;
5354}
5455
@@ -579,10 +580,10 @@ export class Langbase {
579580 public chunk : ( options : ChunkOptions ) => Promise < ChunkResponse > ;
580581 public parse : ( options : ParseOptions ) => Promise < ParseResponse > ;
581582
582- public llm : {
583+ public agent : {
583584 run : {
584- ( options : LlmOptionsStream ) : Promise < RunResponseStream > ;
585- ( options : LlmOptions ) : Promise < RunResponse > ;
585+ ( options : AgentRunOptionsStream ) : Promise < RunResponseStream > ;
586+ ( options : AgentRunOptions ) : Promise < RunResponse > ;
586587 } ;
587588 } ;
588589
@@ -665,8 +666,8 @@ export class Langbase {
665666 } ,
666667 } ;
667668
668- this . llm = {
669- run : this . runLlm . bind ( this ) ,
669+ this . agent = {
670+ run : this . runAgent . bind ( this ) ,
670671 } ;
671672 }
672673
@@ -1089,12 +1090,14 @@ export class Langbase {
10891090 }
10901091
10911092 // Add the private implementation
1092- private async runLlm ( options : LlmOptionsStream ) : Promise < RunResponseStream > ;
1093+ private async runAgent (
1094+ options : AgentRunOptionsStream ,
1095+ ) : Promise < RunResponseStream > ;
10931096
1094- private async runLlm ( options : LlmOptions ) : Promise < RunResponse > ;
1097+ private async runAgent ( options : AgentRunOptions ) : Promise < RunResponse > ;
10951098
1096- private async runLlm (
1097- options : LlmOptions | LlmOptionsStream ,
1099+ private async runAgent (
1100+ options : AgentRunOptions | AgentRunOptionsStream ,
10981101 ) : Promise < RunResponse | RunResponseStream > {
10991102 if ( ! options . apiKey ) {
11001103 throw new Error ( 'LLM API key is required to run this LLM.' ) ;
@@ -1106,7 +1109,7 @@ export class Langbase {
11061109 }
11071110
11081111 return this . request . post ( {
1109- endpoint : '/v1/llm/run' ,
1112+ endpoint : '/v1/agent/run' ,
11101113 body : options ,
11111114 headers : {
11121115 ...( options . apiKey && { 'LB-LLM-Key' : options . apiKey } ) ,
0 commit comments