1+ import type { OpenAI } from '../..' ;
2+ import { OpenAIError } from '../../core/error' ;
13import type {
24 ChatCompletion ,
35 ChatCompletionCreateParams ,
@@ -6,8 +8,6 @@ import type {
68 ChatCompletionTool ,
79 ChatCompletionToolMessageParam ,
810} from '../../resources/chat/completions' ;
9- import type { OpenAI } from '../..' ;
10- import { OpenAIError } from '../../core/error' ;
1111import type { BetaRunnableTool } from './BetaRunnableTool' ;
1212// BetaMessage, BetaMessageParam, BetaToolUnion, MessageCreateParams
1313// import { BetaMessageStream } from '../BetaMessageStream';
@@ -72,11 +72,6 @@ export class BetaToolRunner<Stream extends boolean> {
7272 this . #completion = promiseWithResolvers ( ) ;
7373 }
7474
75- get #firstChoiceInCurrentMessage( ) : ChatCompletionMessageParam | null {
76- const lastMessage = this . #state. params . messages [ this . #state. params . messages . length - 1 ] ;
77- return lastMessage ?? null ;
78- }
79-
8075 async * [ Symbol . asyncIterator ] ( ) : AsyncIterator <
8176 Stream extends true ?
8277 ChatCompletionStream // TODO: for now!
@@ -130,13 +125,16 @@ export class BetaToolRunner<Stream extends boolean> {
130125 }
131126
132127 // TODO: we should probably hit the user with a callback or somehow offer for them to choose between the choices
133- if ( ! this . #firstChoiceInCurrentMessage ) {
128+ if ( ! this . #message ) {
134129 throw new Error ( 'No choices found in message' ) ; // TODO: use better error
135130 }
136131
137132 if ( ! this . #mutated) {
138- // this.#state.params.messages.push({ role, content }); TODO: we want to add all
139- this . #state. params . messages . push ( this . #firstChoiceInCurrentMessage) ;
133+ const completion = await this . #message;
134+ // TODO: what if it is empty?
135+ if ( completion ?. choices && completion . choices . length > 0 && completion . choices [ 0 ] ! . message ) {
136+ this . #state. params . messages . push ( completion . choices [ 0 ] ! . message ) ;
137+ }
140138 }
141139
142140 const toolMessages = await this . #generateToolResponse( await this . #message) ;
@@ -230,11 +228,12 @@ export class BetaToolRunner<Stream extends boolean> {
230228 if ( this . #toolResponse !== undefined ) {
231229 return this . #toolResponse;
232230 }
233- this . #toolResponse = generateToolResponse (
231+ const toolsResponse = generateToolResponse (
234232 lastMessage ,
235233 this . #state. params . tools . filter ( ( tool ) : tool is BetaRunnableTool < any > => 'run' in tool ) ,
236234 ) ;
237- return this . #toolResponse;
235+ this . #toolResponse = toolsResponse ;
236+ return toolsResponse ;
238237 }
239238
240239 /**
@@ -349,7 +348,7 @@ async function generateToolResponse(
349348 if (
350349 ! lastMessage ||
351350 lastMessage . role !== 'assistant' ||
352- ! lastMessage . content ||
351+ // !lastMessage.content || TODO: openai doesn't give content at the same time. ensure this is really always true though
353352 typeof lastMessage . content === 'string'
354353 ) {
355354 return null ;
@@ -381,6 +380,10 @@ async function generateToolResponse(
381380
382381 try {
383382 let input = toolUse . function . arguments ;
383+ // TODO: is this always safe?
384+ if ( typeof input === 'string' ) {
385+ input = JSON . parse ( input ) ;
386+ }
384387 input = tool . parse ( input ) ;
385388
386389 const result = await tool . run ( input ) ;
0 commit comments