Skip to content

Commit e62ec33

Browse files
committed
feat(agents): 增加模型重试中间件并调整递归限制
将递归限制从100增加到300以支持更深的调用链 添加ModelRetryMiddleware到聊天机器人中间件链 优化模型加载逻辑,支持openai和deepseek官方初始化
1 parent 425c5b1 commit e62ec33

File tree

3 files changed

+12
-3
lines changed

3 files changed

+12
-3
lines changed

src/agents/chatbot/graph.py

Lines changed: 2 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -1,4 +1,5 @@
11
from langchain.agents import create_agent
2+
from langchain.agents.middleware import ModelRetryMiddleware
23

34
from src.agents.common import BaseAgent, load_chat_model
45
from src.agents.common.mcp import MCP_SERVERS
@@ -53,6 +54,7 @@ async def get_graph(self, **kwargs):
5354
inject_attachment_context, # 附件上下文注入(LangChain 标准中间件)
5455
context_based_model, # 动态模型选择
5556
dynamic_tool_middleware, # 动态工具选择(支持 MCP 工具注册)
57+
ModelRetryMiddleware(), # 模型重试中间件
5658
],
5759
checkpointer=await self._get_checkpointer(),
5860
)

src/agents/common/base.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -74,7 +74,7 @@ async def stream_messages(self, messages: list[str], input_context=None, **kwarg
7474

7575
# 从 input_context 中提取 attachments(如果有)
7676
attachments = (input_context or {}).get("attachments", [])
77-
input_config = {"configurable": input_context, "recursion_limit": 100}
77+
input_config = {"configurable": input_context, "recursion_limit": 300}
7878

7979
async for msg, metadata in graph.astream(
8080
{"messages": messages, "attachments": attachments},

src/agents/common/models.py

Lines changed: 9 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -1,11 +1,12 @@
11
import os
22
import traceback
33

4-
from langchain.chat_models import BaseChatModel
4+
from langchain.chat_models import BaseChatModel, init_chat_model
55
from pydantic import SecretStr
66

77
from src import config
88
from src.utils import get_docker_safe_url
9+
from src.utils.logging_config import logger
910

1011

1112
def load_chat_model(fully_specified_name: str, **kwargs) -> BaseChatModel:
@@ -26,7 +27,13 @@ def load_chat_model(fully_specified_name: str, **kwargs) -> BaseChatModel:
2627

2728
base_url = get_docker_safe_url(model_info.base_url)
2829

29-
if provider in ["deepseek", "dashscope"]:
30+
if provider in ["openai", "deepseek"]:
31+
model_spec = f"{provider}:{model}"
32+
logger.debug(f"[official] Loading model {model_spec} with kwargs {kwargs}")
33+
return init_chat_model(model_spec, **kwargs)
34+
35+
36+
elif provider in ["dashscope"]:
3037
from langchain_deepseek import ChatDeepSeek
3138

3239
return ChatDeepSeek(

0 commit comments

Comments (0)