@@ -111,6 +111,15 @@ export function useChat({
111111 return null ;
112112 } ) ;
113113
// Abort any in-flight request when the hook unmounts.
// NOTE(review): because `abortController` is in the dependency array, this
// cleanup also runs on EVERY change of the controller (including when it is
// reset to null in executeStreamingResponse's `finally`), aborting the
// previous controller each time — not only on unmount. Confirm that aborting
// an already-completed controller at that point is intended; a true
// unmount-only cleanup would need a ref holding the latest controller.
useEffect(() => {
  return () => {
    if (abortController && !abortController.signal.aborted) {
      abortController.abort();
    }
  };
}, [abortController]);
122+
114123 // Remote mode polling
115124 useEffect ( ( ) => {
116125 if ( ! isRemoteMode || ! remoteUrl || process . env . NODE_ENV === "test" ) return ;
@@ -158,6 +167,92 @@ export function useChat({
158167 }
159168 } , [ initialPrompt , isChatHistoryInitialized ] ) ;
160169
170+ const executeStreamingResponse = async (
171+ newHistory : ChatCompletionMessageParam [ ] ,
172+ currentCompactionIndex : number | null ,
173+ message : string ,
174+ ) => {
175+ // Clean up previous abort controller if it exists
176+ if ( abortController && ! abortController . signal . aborted ) {
177+ abortController . abort ( ) ;
178+ }
179+
180+ // Start streaming response
181+ const controller = new AbortController ( ) ;
182+ setAbortController ( controller ) ;
183+ setIsWaitingForResponse ( true ) ;
184+ setResponseStartTime ( Date . now ( ) ) ;
185+ setInputMode ( false ) ;
186+ logger . debug ( "Starting chat response stream" , {
187+ messageLength : message . length ,
188+ historyLength : newHistory . length ,
189+ } ) ;
190+
191+ try {
192+ const currentStreamingMessageRef = {
193+ current : null as DisplayMessage | null ,
194+ } ;
195+ const streamCallbacks = createStreamCallbacks (
196+ { setMessages, setActivePermissionRequest } ,
197+ currentStreamingMessageRef ,
198+ ) ;
199+
200+ // Execute streaming chat response
201+ await executeStreaming ( {
202+ newHistory,
203+ model,
204+ llmApi,
205+ controller,
206+ streamCallbacks,
207+ currentCompactionIndex,
208+ } ) ;
209+
210+ if (
211+ currentStreamingMessageRef . current &&
212+ currentStreamingMessageRef . current . content
213+ ) {
214+ const messageContent = currentStreamingMessageRef . current . content ;
215+ setMessages ( ( prev ) => [
216+ ...prev ,
217+ {
218+ role : "assistant" ,
219+ content : messageContent ,
220+ isStreaming : false ,
221+ } ,
222+ ] ) ;
223+ }
224+
225+ // Update the chat history with the complete conversation after streaming
226+ setChatHistory ( newHistory ) ;
227+ logger . debug ( "Chat history updated" , {
228+ finalHistoryLength : newHistory . length ,
229+ } ) ;
230+
231+ // Save the updated history to session
232+ logger . debug ( "Saving session" , { historyLength : newHistory . length } ) ;
233+ saveSession ( newHistory ) ;
234+ logger . debug ( "Session saved" ) ;
235+ } catch ( error : any ) {
236+ const errorMessage = `Error: ${ formatError ( error ) } ` ;
237+ setMessages ( ( prev ) => [
238+ ...prev ,
239+ {
240+ role : "system" ,
241+ content : errorMessage ,
242+ messageType : "system" as const ,
243+ } ,
244+ ] ) ;
245+ } finally {
246+ // Stop active time tracking
247+ telemetryService . stopActiveTime ( ) ;
248+
249+ setAbortController ( null ) ;
250+ setIsWaitingForResponse ( false ) ;
251+ setResponseStartTime ( null ) ;
252+ setInputMode ( true ) ;
253+ }
254+ } ;
255+
161256 const handleUserMessage = async ( message : string ) => {
162257 // Handle special commands
163258 const handled = await handleSpecialCommands ( {
@@ -247,80 +342,8 @@ export function useChat({
247342 setChatHistory ( newHistory ) ;
248343 setMessages ( ( prev ) => [ ...prev , { role : "user" , content : message } ] ) ;
249344
250- // Start streaming response
251- const controller = new AbortController ( ) ;
252- setAbortController ( controller ) ;
253- setIsWaitingForResponse ( true ) ;
254- setResponseStartTime ( Date . now ( ) ) ;
255- setInputMode ( false ) ;
256- logger . debug ( "Starting chat response stream" , {
257- messageLength : message . length ,
258- historyLength : newHistory . length ,
259- } ) ;
260-
261- try {
262- const currentStreamingMessageRef = {
263- current : null as DisplayMessage | null ,
264- } ;
265- const streamCallbacks = createStreamCallbacks (
266- { setMessages, setActivePermissionRequest } ,
267- currentStreamingMessageRef ,
268- ) ;
269-
270- // Execute streaming chat response
271- await executeStreaming ( {
272- newHistory,
273- model,
274- llmApi,
275- controller,
276- streamCallbacks,
277- currentCompactionIndex,
278- } ) ;
279-
280- if (
281- currentStreamingMessageRef . current &&
282- currentStreamingMessageRef . current . content
283- ) {
284- const messageContent = currentStreamingMessageRef . current . content ;
285- setMessages ( ( prev ) => [
286- ...prev ,
287- {
288- role : "assistant" ,
289- content : messageContent ,
290- isStreaming : false ,
291- } ,
292- ] ) ;
293- }
294-
295- // Update the chat history with the complete conversation after streaming
296- setChatHistory ( newHistory ) ;
297- logger . debug ( "Chat history updated" , {
298- finalHistoryLength : newHistory . length ,
299- } ) ;
300-
301- // Save the updated history to session
302- logger . debug ( "Saving session" , { historyLength : newHistory . length } ) ;
303- saveSession ( newHistory ) ;
304- logger . debug ( "Session saved" ) ;
305- } catch ( error : any ) {
306- const errorMessage = `Error: ${ formatError ( error ) } ` ;
307- setMessages ( ( prev ) => [
308- ...prev ,
309- {
310- role : "system" ,
311- content : errorMessage ,
312- messageType : "system" as const ,
313- } ,
314- ] ) ;
315- } finally {
316- // Stop active time tracking
317- telemetryService . stopActiveTime ( ) ;
318-
319- setAbortController ( null ) ;
320- setIsWaitingForResponse ( false ) ;
321- setResponseStartTime ( null ) ;
322- setInputMode ( true ) ;
323- }
345+ // Execute the streaming response
346+ await executeStreamingResponse ( newHistory , currentCompactionIndex , message ) ;
324347 } ;
325348
326349 const handleInterrupt = ( ) => {
0 commit comments