@@ -22,8 +22,11 @@ class OpenaiToResponse(BaseToResponse):
     def to_block_response(self, chat_id, chat_record_id, content, is_end, completion_tokens, prompt_tokens,
                           other_params: dict = None,
                           _status=status.HTTP_200_OK):
+        if other_params is None:
+            other_params = {}
         data = ChatCompletion(id=chat_record_id, choices=[
             BlockChoice(finish_reason='stop', index=0, chat_id=chat_id,
+                        reasoning_content=other_params.get('reasoning_content', ""),
                         message=ChatCompletionMessage(role='assistant', content=content))],
                               created=datetime.datetime.now().second, model='', object='chat.completion',
                               usage=CompletionUsage(completion_tokens=completion_tokens,
@@ -32,11 +35,16 @@ def to_block_response(self, chat_id, chat_record_id, content, is_end, completion
                               ).dict()
         return JsonResponse(data=data, status=_status)
 
-    def to_stream_chunk_response(self, chat_id, chat_record_id, node_id, up_node_id_list, content, is_end, completion_tokens,
+    def to_stream_chunk_response(self, chat_id, chat_record_id, node_id, up_node_id_list, content, is_end,
+                                 completion_tokens,
                                  prompt_tokens, other_params: dict = None):
+        if other_params is None:
+            other_params = {}
         chunk = ChatCompletionChunk(id=chat_record_id, model='', object='chat.completion.chunk',
-                                    created=datetime.datetime.now().second,choices=[
-            Choice(delta=ChoiceDelta(content=content, chat_id=chat_id), finish_reason='stop' if is_end else None,
+                                    created=datetime.datetime.now().second, choices=[
+            Choice(delta=ChoiceDelta(content=content, reasoning_content=other_params.get('reasoning_content', ""),
+                                     chat_id=chat_id),
+                   finish_reason='stop' if is_end else None,
                    index=0)],
             usage=CompletionUsage(completion_tokens=completion_tokens,
                                   prompt_tokens=prompt_tokens,
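
A minimal call-site sketch of the changed methods (the instance, identifiers, token counts and text values below are assumptions for illustration, not part of this diff). It shows how a caller can now forward a model's reasoning trace through other_params so it is emitted as reasoning_content, and that omitting other_params defaults to an empty dict so the .get() lookup no longer fails on None:

    # to_response: an OpenaiToResponse instance (construction not shown in this diff)
    # answer_text / reasoning_text: hypothetical strings produced by the model
    chunk = to_response.to_stream_chunk_response(
        chat_id='chat-1', chat_record_id='rec-1', node_id='node-1', up_node_id_list=[],
        content=answer_text, is_end=False, completion_tokens=0, prompt_tokens=0,
        other_params={'reasoning_content': reasoning_text})

    # without other_params, reasoning_content falls back to ""
    block = to_response.to_block_response(
        chat_id='chat-1', chat_record_id='rec-1', content=answer_text, is_end=True,
        completion_tokens=0, prompt_tokens=0)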