Commit a1fdcbc

style: ruff format
1 parent 92a110d commit a1fdcbc

17 files changed (+115, -107 lines)

Makefile

Lines changed: 1 addition & 0 deletions
@@ -27,6 +27,7 @@ lint:
 format:
 	uv run ruff format .
 	uv run ruff check . --fix
+	uv run python -m ruff check --select I src --fix
 
 format_diff:
 	uv run ruff format --diff .
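The new Makefile line runs Ruff's import-sorting rules (the isort-compatible "I" rule group) over src with autofix, which is what reorders the import blocks in the Python diffs below. A rough illustration of the ordering it enforces (the grouping is the point; the modules are arbitrary):

# Import order enforced by Ruff's "I" rules: __future__ first, then the standard
# library, then third-party packages, with one blank line between groups.
from __future__ import annotations

import os
from pathlib import Path

from fastapi import FastAPI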

server/routers/chat_router.py

Lines changed: 4 additions & 2 deletions
@@ -112,7 +112,8 @@ async def get_agent(current_user: User = Depends(get_required_user)):
         metadata = yaml.safe_load(f)
     return {"agents": agents, "metadata": metadata}
 
-#TODO:[pending] thread_id is generated directly on the frontend (e.g. 1234); ideally validate incoming thread_id values and accept only uuid4
+
+# TODO:[pending] thread_id is generated directly on the frontend (e.g. 1234); ideally validate incoming thread_id values and accept only uuid4
 @chat.post("/agent/{agent_id}")
 async def chat_agent(
     agent_id: str,

@@ -254,7 +255,8 @@ async def save_messages_from_langgraph_state(
         logger.error(f"Error saving messages from LangGraph state: {e}")
         logger.error(traceback.format_exc())
 
-    #TODO:[feature suggestion] For tools that should only run after human approval, langgraph's interrupt method can pause the conversation, wait for user input, and then use command to jump back
+    # TODO:[feature suggestion] For tools that should only run after human approval,
+    # langgraph's interrupt method can pause the conversation, wait for user input, and then use command to jump back
     async def stream_messages():
         # The server has received the request
         yield make_chunk(status="init", meta=meta, msg=HumanMessage(content=query).model_dump())
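A minimal sketch of the validation the first TODO asks for, assuming thread_id arrives as a plain string; the helper name and the 400 response are illustrative, not part of the repository:

import uuid

from fastapi import HTTPException


def ensure_uuid4(thread_id: str) -> str:
    """Reject thread_id values that are not canonical UUID4 strings (e.g. "1234")."""
    try:
        parsed = uuid.UUID(thread_id, version=4)
    except ValueError:
        raise HTTPException(status_code=400, detail="thread_id must be a UUID4 string")
    # UUID(..., version=4) coerces the version bits, so also compare the canonical form
    if str(parsed) != thread_id.lower():
        raise HTTPException(status_code=400, detail="thread_id must be a UUID4 string")
    return thread_id

And a rough sketch of the human-approval idea from the second TODO, using LangGraph's interrupt/Command human-in-the-loop API; the tool, its payload, and the resume value are made up for illustration, and the graph must run with a checkpointer for interrupt to work:

from langchain_core.tools import tool
from langgraph.types import Command, interrupt


@tool
def drop_database(db_id: str) -> str:
    """Destructive operation that waits for explicit human approval."""
    decision = interrupt({"action": "drop_database", "db_id": db_id})  # pauses the graph run
    if decision != "approve":
        return "Operation rejected by the user."
    return f"Database {db_id} dropped."


# The paused run is later resumed with the user's answer, e.g.:
# graph.invoke(Command(resume="approve"), config={"configurable": {"thread_id": thread_id}})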

server/utils/lifespan.py

Lines changed: 2 additions & 2 deletions
@@ -1,11 +1,11 @@
-import asyncio
 from contextlib import asynccontextmanager
 
 from fastapi import FastAPI
 
 from server.services import tasker
 
-#TODO:[done] use lifespan for unified lifecycle management
+# TODO:[done] use lifespan for unified lifecycle management
+
 
 @asynccontextmanager
 async def lifespan(app: FastAPI):
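For context, the lifespan pattern the TODO refers to looks roughly like this; the startup and shutdown bodies are placeholders, not the repository's actual tasker wiring:

from contextlib import asynccontextmanager

from fastapi import FastAPI


@asynccontextmanager
async def lifespan(app: FastAPI):
    # startup: initialize background services before the app starts serving requests
    print("starting background services")
    yield
    # shutdown: release resources when the app stops
    print("stopping background services")


app = FastAPI(lifespan=lifespan)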

server/utils/singleton.py

Lines changed: 3 additions & 1 deletion
@@ -1,9 +1,11 @@
 from threading import Lock
 
+
 class SingletonMeta(type):
     """
     This is a thread-safe implementation of Singleton.
     """
+
     _instances = {}
     _lock: Lock = Lock()
 
@@ -12,4 +14,4 @@ def __call__(cls, *args, **kwargs):
             if cls not in cls._instances:
                 instance = super().__call__(*args, **kwargs)
                 cls._instances[cls] = instance
-        return cls._instances[cls]
+        return cls._instances[cls]
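Usage of the metaclass above, for reference; AppState is an invented example class, not from the repository:

class AppState(metaclass=SingletonMeta):
    def __init__(self):
        self.counter = 0


a = AppState()
b = AppState()
assert a is b  # the second call returns the cached instance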

src/agents/chatbot/tools.py

Lines changed: 3 additions & 2 deletions
@@ -9,8 +9,9 @@
 from src.storage.minio import upload_image_to_minio
 from src.utils import logger
 
-#TODO:[done] reworked the example tool definition to better fit how langgraph invokes tools
-@tool(name_or_callable="全能计算器",description="可以对给定的2个数字选择进行加减乘除四种计算")
+
+# TODO:[done] reworked the example tool definition to better fit how langgraph invokes tools
+@tool(name_or_callable="全能计算器", description="可以对给定的2个数字选择进行加减乘除四种计算")
 def calculator(a: float, b: float, operation: str) -> float:
     """
     可以对给定的2个数字选择进行加减乘除四种计算
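With name_or_callable and description passed explicitly, the resulting LangChain tool exposes those values; a quick sketch of calling it (the "add" operation value is a guess at the function body, which the diff does not show):

print(calculator.name)         # 全能计算器
print(calculator.description)  # 可以对给定的2个数字选择进行加减乘除四种计算
result = calculator.invoke({"a": 3, "b": 4, "operation": "add"})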

src/agents/common/base.py

Lines changed: 2 additions & 2 deletions
@@ -1,12 +1,12 @@
 from __future__ import annotations
 
 import os
-from pathlib import Path
 from abc import abstractmethod
+from pathlib import Path
 
-from langgraph.graph.state import CompiledStateGraph
 from langgraph.checkpoint.memory import InMemorySaver
 from langgraph.checkpoint.sqlite.aio import AsyncSqliteSaver, aiosqlite
+from langgraph.graph.state import CompiledStateGraph
 
 from src import config as sys_config
 from src.agents.common.context import BaseContext

src/agents/common/tools.py

Lines changed: 7 additions & 6 deletions
@@ -11,7 +11,7 @@
 from src.utils import logger
 
 
-@tool
+@tool(name_or_callable="查询知识图谱", description="使用这个工具可以查询知识图谱中包含的三元组信息。")
 def query_knowledge_graph(query: Annotated[str, "The keyword to query knowledge graph."]) -> Any:
     """Use this to query knowledge graph, which include some food domain knowledge."""
     try:

@@ -35,7 +35,9 @@ def get_static_tools() -> list:
 
     # Check whether web search is enabled
     if config.enable_web_search:
-        static_tools.append(TavilySearch(max_results=10))
+        search = TavilySearch(max_results=10)
+        search.metadata = {"name": "Tavily 网页搜索"}
+        static_tools.append(search)
 
     return static_tools

@@ -76,9 +78,6 @@ async def async_retriever_wrapper(query_text: str) -> Any:
 
     for db_id, retrieve_info in retrievers.items():
         try:
-            # Use the improved tool-ID generation strategy
-            tool_id = f"query_{db_id[:8]}"
-
             # Build the tool description
             description = (
                 f"使用 {retrieve_info['name']} 知识库进行检索。\n"

@@ -88,10 +87,12 @@ async def async_retriever_wrapper(query_text: str) -> Any:
             # Create the retriever wrapper via a factory function to avoid closure problems
             retriever_wrapper = _create_retriever_wrapper(db_id, retrieve_info)
 
+            safename = retrieve_info["name"].replace(" ", "_")[:20]
+
             # Build the async tool with StructuredTool.from_function
             tool = StructuredTool.from_function(
                 coroutine=retriever_wrapper,
-                name=tool_id,
+                name=safename,
                 description=description,
                 args_schema=KnowledgeRetrieverModel,
                 metadata=retrieve_info["metadata"] | {"tag": ["knowledgebase"]},
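The factory-function comment above refers to Python's late-binding closures: defining the coroutine directly inside the loop would make every tool capture the loop's final db_id. A generic sketch of the pattern (names are illustrative, not the repository's _create_retriever_wrapper):

def make_wrapper(db_id: str):
    async def wrapper(query_text: str) -> str:
        # placeholder for the real retriever call bound to this db_id
        return f"search {db_id} for: {query_text}"

    return wrapper


wrappers = {db_id: make_wrapper(db_id) for db_id in ("db_a", "db_b")}
# each coroutine keeps its own db_id instead of sharing the last loop value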

src/config/app.py

Lines changed: 16 additions & 28 deletions
@@ -13,7 +13,7 @@
 
 import tomli
 import tomli_w
-from pydantic import BaseModel, Field, field_validator
+from pydantic import BaseModel, Field
 
 from src.config.static.models import (
     DEFAULT_CHAT_MODEL_PROVIDERS,

@@ -172,13 +172,10 @@ def _handle_environment(self):
         self.model_dir = os.environ.get("MODEL_DIR", self.model_dir)
         if self.model_dir:
             if os.path.exists(self.model_dir):
-                logger.debug(
-                    f"Model directory ({self.model_dir}) contains: {os.listdir(self.model_dir)}"
-                )
+                logger.debug(f"Model directory ({self.model_dir}) contains: {os.listdir(self.model_dir)}")
             else:
                 logger.warning(
-                    f"Model directory ({self.model_dir}) does not exist. "
-                    "If not configured, please ignore it."
+                    f"Model directory ({self.model_dir}) does not exist. If not configured, please ignore it."
                 )
 
         # Check the model providers' environment variables

@@ -195,14 +192,10 @@ def _handle_environment(self):
             self.enable_web_search = True
 
         # Get the available model providers
-        self.valuable_model_provider = [
-            k for k, v in self.model_provider_status.items() if v
-        ]
+        self.valuable_model_provider = [k for k, v in self.model_provider_status.items() if v]
 
         if not self.valuable_model_provider:
-            raise ValueError(
-                "No model provider available, please check your `.env` file."
-            )
+            raise ValueError("No model provider available, please check your `.env` file.")
 
     def save(self):
         """Save the config to the TOML file (only user-modified fields)"""

@@ -251,15 +244,11 @@ def dump_config(self) -> dict[str, Any]:
         )
 
         # Add model info (converted to dict form for the frontend)
-        config_dict["model_names"] = {
-            provider: info.model_dump() for provider, info in self.model_names.items()
-        }
+        config_dict["model_names"] = {provider: info.model_dump() for provider, info in self.model_names.items()}
         config_dict["embed_model_names"] = {
             model_id: info.model_dump() for model_id, info in self.embed_model_names.items()
         }
-        config_dict["reranker_names"] = {
-            model_id: info.model_dump() for model_id, info in self.reranker_names.items()
-        }
+        config_dict["reranker_names"] = {model_id: info.model_dump() for model_id, info in self.reranker_names.items()}
 
         # Add runtime status info
         config_dict["model_provider_status"] = self.model_provider_status

@@ -269,10 +258,12 @@ def dump_config(self) -> dict[str, Any]:
         for field_name, field_info in Config.model_fields.items():
             if not field_info.exclude:  # exclude internal fields
                 fields_info[field_name] = {
-                    'des': field_info.description,
-                    'default': field_info.default,
-                    'type': field_info.annotation.__name__ if hasattr(field_info.annotation, '__name__') else str(field_info.annotation),
-                    'exclude': field_info.exclude if hasattr(field_info, 'exclude') else False,
+                    "des": field_info.description,
+                    "default": field_info.default,
+                    "type": field_info.annotation.__name__
+                    if hasattr(field_info.annotation, "__name__")
+                    else str(field_info.annotation),
+                    "exclude": field_info.exclude if hasattr(field_info, "exclude") else False,
                 }
         config_dict["_config_items"] = fields_info
 

@@ -301,14 +292,12 @@ def get_reranker_choices(self) -> list[str]:
 
     def __getitem__(self, key: str) -> Any:
         """Support dict-style access: config[key]"""
-        logger.warning("Using deprecated dict-style access for Config. "
-                       "Please use attribute access instead.")
+        logger.warning("Using deprecated dict-style access for Config. Please use attribute access instead.")
         return getattr(self, key, None)
 
     def __setitem__(self, key: str, value: Any):
         """Support dict-style assignment: config[key] = value"""
-        logger.warning("Using deprecated dict-style assignment for Config. "
-                       "Please use attribute access instead.")
+        logger.warning("Using deprecated dict-style assignment for Config. Please use attribute access instead.")
         setattr(self, key, value)
 
     def update(self, other: dict):

@@ -352,8 +341,7 @@ def _save_models_to_file(self, provider_name: str = None):
         else:
             # Save all model_names
             user_config["model_names"] = {
-                provider: info.model_dump()
-                for provider, info in self.model_names.items()
+                provider: info.model_dump() for provider, info in self.model_names.items()
             }
             # Record that the whole model_names field was modified
             self._user_modified_fields.add("model_names")
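For reference, the dump_config hunk above relies on Pydantic v2's model_fields mapping; a tiny standalone illustration (the Demo model is invented):

from pydantic import BaseModel, Field


class Demo(BaseModel):
    data_dir: str = Field(default="data", description="local data directory")


for name, info in Demo.model_fields.items():
    # FieldInfo carries the description, default and annotation used by dump_config
    print(name, info.description, info.default, info.annotation)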

src/knowledge/base.py

Lines changed: 5 additions & 6 deletions
@@ -1,7 +1,7 @@
 import json
 import os
-import tempfile
 import shutil
+import tempfile
 from abc import ABC, abstractmethod
 from typing import Any
 

@@ -144,8 +144,8 @@ def create_database(
             "name": database_name,
             "description": description,
             "kb_type": self.kb_type,
-            "embed_info": embed_info.model_dump() if hasattr(embed_info, 'model_dump') else embed_info,
-            "llm_info": llm_info.model_dump() if hasattr(llm_info, 'model_dump') else llm_info,
+            "embed_info": embed_info.model_dump() if hasattr(embed_info, "model_dump") else embed_info,
+            "llm_info": llm_info.model_dump() if hasattr(llm_info, "model_dump") else llm_info,
             "metadata": kwargs,
             "created_at": utc_isoformat(),
         }

@@ -559,7 +559,7 @@ def _load_metadata(self):
 
     def _serialize_metadata(self, obj):
         """Recursively serialize Pydantic models inside the metadata"""
-        if hasattr(obj, 'dict'):
+        if hasattr(obj, "dict"):
             return obj.dict()
         elif isinstance(obj, dict):
             return {k: self._serialize_metadata(v) for k, v in obj.items()}

@@ -589,8 +589,7 @@ def _save_metadata(self):
 
         # Atomic write (via a temporary file)
         with tempfile.NamedTemporaryFile(
-            mode='w', dir=os.path.dirname(meta_file),
-            prefix='.tmp_', suffix='.json', delete=False
+            mode="w", dir=os.path.dirname(meta_file), prefix=".tmp_", suffix=".json", delete=False
        ) as tmp_file:
             json.dump(data, tmp_file, ensure_ascii=False, indent=2)
             temp_path = tmp_file.name
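The hunk above only shows the temp-file write; the usual completion of this atomic-write pattern is to swap the temp file into place, for example with os.replace (an assumption about the rest of _save_metadata, sketched standalone with an illustrative path):

import json
import os
import tempfile

meta_file = "/tmp/kb_demo/meta.json"  # illustrative path
os.makedirs(os.path.dirname(meta_file), exist_ok=True)
data = {"name": "example"}

with tempfile.NamedTemporaryFile(
    mode="w", dir=os.path.dirname(meta_file), prefix=".tmp_", suffix=".json", delete=False
) as tmp_file:
    json.dump(data, tmp_file, ensure_ascii=False, indent=2)
    temp_path = tmp_file.name

os.replace(temp_path, meta_file)  # atomic swap on the same filesystem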

src/knowledge/graph.py

Lines changed: 1 addition & 1 deletion
@@ -299,7 +299,7 @@ def _batch_set_embeddings(tx, entity_embedding_pairs):
         logger.info(f"Adding entity to {kgdb_name}")
         session.execute_write(_create_graph, triples)
         logger.info(f"Creating vector index for {kgdb_name} with {config.embed_model}")
-        session.execute_write(_create_vector_index, getattr(cur_embed_info, 'dimension', 1024))
+        session.execute_write(_create_vector_index, getattr(cur_embed_info, "dimension", 1024))
 
         # Collect all entity names that need processing, de-duplicated
         all_entities = []
