File tree Expand file tree Collapse file tree 1 file changed +30
-2
lines changed Expand file tree Collapse file tree 1 file changed +30
-2
lines changed Original file line number Diff line number Diff line change @@ -359,8 +359,36 @@ def generate_response():
359
359
if chunk ['message' ]['content' ]:
360
360
yield chunk ['message' ]['content' ]
361
361
362
- # Stream the response
363
- assistant_response = st .write_stream (generate_response ())
362
# Placeholder so the spinner can be swapped out once streaming begins.
status_placeholder = st.empty()

# Block on the very first chunk while the spinner is visible; the rest of
# the stream is rendered outside the spinner context.
stream = generate_response()
got_reply = False
leading_chunk = ""
with status_placeholder.container(), st.spinner("Thinking"):
    try:
        leading_chunk = next(stream)
        got_reply = True
    except StopIteration:
        # Generator produced nothing at all.
        pass

if got_reply:
    # Drop the spinner and stream the answer in its place.
    status_placeholder.empty()

    def _full_stream():
        """Re-yield the chunk consumed above, then the remaining chunks."""
        yield leading_chunk
        yield from stream

    assistant_response = st.write_stream(_full_stream())
else:
    status_placeholder.error("No response received from the model.")
    assistant_response = ""

# Add assistant response to chat history
add_message_to_current_chat("assistant", assistant_response)
You can’t perform that action at this time.
0 commit comments