
Commit 9245e8a

update deepseek reasoner
1 parent 78a9cd3 commit 9245e8a

File tree

2 files changed: +10 -1 lines changed

  metagpt/provider/base_llm.py   (+4 -1)
  metagpt/provider/openai_api.py (+6 -0)

metagpt/provider/base_llm.py

Lines changed: 4 additions & 1 deletion

@@ -215,7 +215,10 @@ async def acompletion_text(
 
     def get_choice_text(self, rsp: dict) -> str:
         """Required to provide the first text of choice"""
-        return rsp.get("choices")[0]["message"]["content"]
+        message = rsp.get("choices")[0]["message"]
+        if "reasoning_content" in message:
+            self.reasoning_content = message["reasoning_content"]
+        return message["content"]
 
     def get_choice_delta_text(self, rsp: dict) -> str:
         """Required to provide the first text of stream choice"""

metagpt/provider/openai_api.py

Lines changed: 6 additions & 0 deletions

@@ -91,8 +91,12 @@ async def _achat_completion_stream(self, messages: list[dict], timeout=USE_CONFI
         )
         usage = None
         collected_messages = []
+        collected_reasoning_messages = []
         has_finished = False
         async for chunk in response:
+            if hasattr(chunk.choices[0].delta, "reasoning_content"):
+                collected_reasoning_messages.append(chunk.choices[0].delta.reasoning_content)  # for deepseek
+                continue
             chunk_message = chunk.choices[0].delta.content or "" if chunk.choices else ""  # extract the message
             finish_reason = (
                 chunk.choices[0].finish_reason if chunk.choices and hasattr(chunk.choices[0], "finish_reason") else None
@@ -118,6 +122,8 @@ async def _achat_completion_stream(self, messages: list[dict], timeout=USE_CONFI
 
         log_llm_stream("\n")
         full_reply_content = "".join(collected_messages)
+        if collected_reasoning_messages:
+            self.reasoning_content = "".join(collected_reasoning_messages)
         if not usage:
             # Some services do not provide the usage attribute, such as OpenAI or OpenLLM
             usage = self._calc_usage(messages, full_reply_content)
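
In the streaming path, reasoning deltas are appended to collected_reasoning_messages and skipped from normal content handling (continue), then joined into self.reasoning_content once the stream ends, while the answer text still flows through collected_messages. Below is a self-contained sketch of that filtering logic against a mocked stream; the chunk objects are SimpleNamespace stand-ins rather than real OpenAI SDK objects, and the sample text is an illustrative assumption.

import asyncio
from types import SimpleNamespace


async def mock_stream():
    # Simulated chunks: DeepSeek-style reasoning deltas arrive first, then answer deltas.
    deltas = [
        SimpleNamespace(reasoning_content="Let me think about this."),
        SimpleNamespace(reasoning_content=" The key constraint is X."),
        SimpleNamespace(content="Given X, the answer is 42."),
    ]
    for delta in deltas:
        yield SimpleNamespace(choices=[SimpleNamespace(delta=delta, finish_reason=None)])


async def collect(response):
    collected_messages = []
    collected_reasoning_messages = []
    async for chunk in response:
        delta = chunk.choices[0].delta
        if hasattr(delta, "reasoning_content"):
            # Same branch as the patch: treat the chunk as reasoning and skip normal handling.
            collected_reasoning_messages.append(delta.reasoning_content)
            continue
        collected_messages.append(delta.content or "")
    return "".join(collected_reasoning_messages), "".join(collected_messages)


reasoning, answer = asyncio.run(collect(mock_stream()))
print(reasoning)  # "Let me think about this. The key constraint is X."
print(answer)     # "Given X, the answer is 42."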
