Skip to content

Commit 28c7b6a

Browse files
feat: gpt-5-nano边界优化
1 parent 328ddf6 commit 28c7b6a

File tree

1 file changed

+40
-18
lines changed

1 file changed

+40
-18
lines changed

backend/service/summary_agent.py

Lines changed: 40 additions & 18 deletions
Original file line number | Diff line number | Diff line change
@@ -86,31 +86,53 @@ def generate_summary(messages: List[Dict[str, Any]], previous_summary: str = Non
8686
Keep summaries factual, objective, and efficient."""
8787

8888
# 获取配置
89-
config = get_config()
89+
config = get_config() or {}
9090

9191
# 创建OpenAI客户端
9292
client = OpenAI(
9393
base_url=config.get("openai_base_url") or LLM_DEFAULT_BASE_URL,
9494
api_key=config.get("openai_api_key") or get_comfyui_copilot_api_key() or ""
9595
)
9696

97-
# 调用LLM生成摘要
98-
completion = client.chat.completions.parse(
99-
model=config.get("model_select"),
100-
messages=[
101-
{"role": "system", "content": system_prompt},
102-
{"role": "user", "content": user_prompt}
103-
],
104-
max_tokens=8192, # 限制输出长度
105-
temperature=0.3, # 较低的温度确保摘要一致性
106-
response_format=SummaryResponse
107-
)
108-
109-
result = completion.choices[0].message.parsed
110-
log.info(f"Generated summary: {result.summary[:100]}...")
111-
112-
if result.summary:
113-
return result.summary
97+
# 确定使用的模型,如果没有指定则使用默认模型
98+
model_name = config.get("model_select") or WORKFLOW_MODEL_NAME
99+
100+
log.info(f"Generating summary with model: {model_name}")
101+
102+
try:
103+
# 调用LLM生成摘要
104+
completion = client.chat.completions.parse(
105+
model=model_name,
106+
messages=[
107+
{"role": "system", "content": system_prompt},
108+
{"role": "user", "content": user_prompt}
109+
],
110+
response_format=SummaryResponse
111+
)
112+
result = completion.choices[0].message.parsed
113+
summary_text = result.summary if result else ""
114+
115+
except TypeError as e:
116+
# 捕获特定的 NoneType 迭代错误,通常意味着模型不支持 Structured Outputs 或 SDK 内部处理响应出错
117+
if "'NoneType' object is not iterable" in str(e):
118+
log.warning(f"Structured Outputs failed for model {model_name} (TypeError: {e}). Falling back to standard chat completion.")
119+
120+
# 降级方案:使用普通的 create 方法,不带 response_format
121+
completion = client.chat.completions.create(
122+
model=model_name,
123+
messages=[
124+
{"role": "system", "content": system_prompt},
125+
{"role": "user", "content": user_prompt}
126+
],
127+
)
128+
summary_text = completion.choices[0].message.content
129+
else:
130+
raise e
131+
132+
log.info(f"Generated summary: {summary_text[:100]}..." if summary_text else "Generated empty summary")
133+
134+
if summary_text:
135+
return summary_text
114136
else:
115137
log.warning("Summary generation returned empty result")
116138
return ""

0 commit comments

Comments (0)