@@ -146,6 +146,7 @@ def answer_query_stream(self, query):
         messages.extend(self.conversation_history)
 
         try:
+            logging.debug(f"Sending query to LLM: {normalized_query}")
             stream = openai.ChatCompletion.create(
                 model=os.getenv("MODEL"),
                 messages=messages,
@@ -159,23 +160,42 @@ def answer_query_stream(self, query):
 
             collected_messages = []
             for chunk in stream:
-                logging.info(f"Received chunk: {chunk}")
-                content = chunk['choices'][0]['delta'].get('content', '')
-                collected_messages.append(content)
-                yield content
-                if chunk['choices'][0].get('finish_reason') is not None:
+                try:
+                    logging.debug(f"Received chunk: {chunk}")
+                    content = chunk['choices'][0]['delta'].get('content', '')
+                    collected_messages.append(content)
+                    yield content
+                    if chunk['choices'][0].get('finish_reason') is not None:
+                        break
+                except (BrokenPipeError, OSError) as e:
+                    # Client disconnected, stop streaming
+                    logging.warning(f"Client disconnected during streaming: {e}")
+                    traceback.print_exc(file=sys.stderr)
                     break
 
+
+            logging.debug(f"Finished receiving response: {normalized_query}")
+
             if len(citations) > 0:
-                yield "\n\nReferences:\n" + "\n".join(citations)
+                try:
+                    yield "\n\nReferences:\n" + "\n".join(citations)
+                except (BrokenPipeError, OSError) as e:
+                    # Client disconnected, stop streaming
+                    logging.warning(f"Client disconnected during citations streaming: {e}")
+                    traceback.print_exc(file=sys.stderr)
+
 
             full_response = ''.join(collected_messages).strip()
             self.conversation_history.append({"role": "assistant", "content": full_response})
 
         except Exception as e:
             print(f"Error in answer_query_stream: {e}", file=sys.stderr)
             traceback.print_exc(file=sys.stderr)
-            yield "An error occurred while generating the response."
+            try:
+                yield "An error occurred while generating the response."
+            except (BrokenPipeError, OSError):
+                # Client disconnected, can't send error message
+                logging.warning("Client disconnected before error message could be sent")
 
     def clear_conversation_history(self):
         self.conversation_history = []
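
For context on why the new except (BrokenPipeError, OSError) blocks sit directly around the yield statements: an exception can only reach that point if whatever is consuming the generator throws it back in (via generator.throw) after failing to deliver a chunk to a disconnected client. The sketch below is not from this repository; fake_answer_stream and consume are hypothetical names standing in for answer_query_stream and the server code that drives it, which this commit does not show.

import sys

def fake_answer_stream(query):
    # Illustrative stand-in for answer_query_stream(): yields text chunks and,
    # like the commit, stops quietly if a broken pipe is raised at the yield.
    for chunk in ("Answering: ", query, "\n"):
        try:
            yield chunk
        except (BrokenPipeError, OSError):
            # The consumer could not deliver the previous chunk; stop producing.
            break

def consume(gen, out=sys.stdout):
    # Drive the generator, writing each chunk to `out`. If the write fails
    # because the reader has gone away, throw the error back into the
    # generator so its except branch (as in the commit) can run.
    it = iter(gen)
    try:
        chunk = next(it)
        while True:
            try:
                out.write(chunk)
                out.flush()
            except (BrokenPipeError, OSError) as exc:
                try:
                    it.throw(exc)  # surfaces inside the generator at `yield`
                except StopIteration:
                    pass
                return
            chunk = next(it)
    except StopIteration:
        return

if __name__ == "__main__":
    consume(fake_answer_stream("what is streaming?"))

Run as a plain script this just prints the chunks; the throw path only triggers when a write to `out` actually fails, which is the situation the commit's new except blocks are guarding against.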