@@ -215,43 +215,30 @@ def del_datasource(self, datasource: Union[Datasource, str]):
 
         # self.knowledge_bases = updated.knowledge_bases
 
-    def completion(self, message: str, stream: bool = False) -> Union[str, Iterable[str]]:
+    def completion(self, message: str, stream: bool = False) -> Union[str, Iterable[object]]:
         """
         Call mind completion
 
         :param message: input question
         :param stream: to enable stream mode
 
-        :return: string if stream mode is off or a generator of strings if stream mode is on
+        :return: string if stream mode is off or iterator of ChoiceDelta objects (by openai)
         """
+        response = self.openai_client.chat.completions.create(
+            model=self.name,
+            messages=[
+                {'role': 'user', 'content': message}
+            ],
+            stream=stream
+        )
         if stream:
-            response = self.api.post_stream(
-                '/chat/completions',
-                data={
-                    'model': self.name,
-                    'messages': [
-                        {'role': 'user', 'content': message}
-                    ],
-                    'stream': stream
-                }
-            )
             return self._stream_response(response)
         else:
-            response = self.api.post(
-                '/chat/completions',
-                data={
-                    'model': self.name,
-                    'messages': [
-                        {'role': 'user', 'content': message}
-                    ],
-                    'stream': stream
-                }
-            )
-            return response.json()['choices'][0]['message']['content']
+            return response.choices[0].message.content
 
     def _stream_response(self, response):
         for chunk in response:
-            yield chunk['choices'][0]['delta']['content']
+            yield chunk.choices[0].delta.content
 
 
 class Minds:
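
For reference, a minimal usage sketch of the updated method, assuming a Mind instance named `mind` has already been obtained from the SDK (the prompt strings and variable names are illustrative, not part of this change). As written, `_stream_response` yields the `delta.content` of each streamed chunk, so the streaming loop below treats each item as a text fragment that may be None:

# Non-streaming: completion() returns the full answer as a single string.
answer = mind.completion('How many rows does the sales table have?')
print(answer)

# Streaming: completion() returns the generator from _stream_response();
# each item is the delta.content of an openai chunk and may be None.
for fragment in mind.completion('Summarize last month of sales.', stream=True):
    if fragment:
        print(fragment, end='')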