This repository was archived by the owner on Dec 7, 2025. It is now read-only.

Commit e09f2c9: Fix v35
Parent: f6f869d
2 files changed: +2 -6 lines

apps/chat/app/core/llm_client.py (1 addition, 5 deletions)

@@ -15,21 +15,17 @@ class LLMClient:
     def __init__(self, settings=None):
         self.settings = settings or get_settings()
 
-    async def generate_reply(self, messages: List[Message], user_api_key: Optional[str] = None) -> str:
+    async def generate_reply(self, messages: List[Message]) -> str:
         payload = {
             "model": self.settings.chat_model,
             "messages": [m.dict() for m in messages],
         }
 
-        # Use the client's key if one was provided, otherwise the default from settings
-        api_key = user_api_key or self.settings.openai_api_key
-
         try:
             async with httpx.AsyncClient(timeout=15.0) as client:
                 response = await client.post(
                     f"{self.settings.llm_api_url}/chat/completions",
                     json=payload,
-                    # headers={"Authorization": f"Bearer {api_key}"}
                 )
                 response.raise_for_status()
                 data = response.json()
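For context, a minimal sketch of how LLMClient.generate_reply likely reads after this commit. The imports, the Settings and Message definitions, the get_settings helper's body, and everything after data = response.json() fall outside the diff hunk, so they are assumptions (the return path is inferred from the OpenAI-style /chat/completions response shape), not the repository's confirmed code.

from dataclasses import dataclass
from typing import List, Optional

import httpx
from pydantic import BaseModel


@dataclass
class Settings:
    # Assumed settings fields, inferred from the diff; values are placeholders.
    chat_model: str = "gpt-3.5-turbo"
    llm_api_url: str = "http://localhost:8000/v1"
    openai_api_key: Optional[str] = None


def get_settings() -> Settings:
    # Stand-in for the app's real get_settings(); an assumption.
    return Settings()


class Message(BaseModel):
    # Assumed shape; serialized with .dict() when building the payload.
    role: str
    content: str


class LLMClient:
    def __init__(self, settings=None):
        self.settings = settings or get_settings()

    async def generate_reply(self, messages: List[Message]) -> str:
        payload = {
            "model": self.settings.chat_model,
            "messages": [m.dict() for m in messages],
        }
        try:
            async with httpx.AsyncClient(timeout=15.0) as client:
                response = await client.post(
                    f"{self.settings.llm_api_url}/chat/completions",
                    json=payload,
                )
                response.raise_for_status()
                data = response.json()
                # The diff ends at response.json(); extracting the first
                # choice is an assumption based on the OpenAI response format.
                return data["choices"][0]["message"]["content"]
        except httpx.HTTPError as exc:
            # The original except clause lies outside the diff hunk;
            # re-raising with context here is a placeholder.
            raise RuntimeError("LLM request failed") from exc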

apps/chat/app/services/chat_service.py (1 addition, 1 deletion)

@@ -11,5 +11,5 @@ def __init__(self, llm_client: LLMClient = None):
         self.llm_client = llm_client or LLMClient()
 
     async def get_ai_reply(self, messages: List[Message], user_api_key: Optional[str] = None) -> str:
-        reply = await self.llm_client.generate_reply(messages, user_api_key)
+        reply = await self.llm_client.generate_reply(messages)
        return reply
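Note that the commit removes the user_api_key forwarding but leaves the parameter in get_ai_reply's signature, so existing callers keep compiling while the key is now silently ignored. A hypothetical call site, building on the sketch above:

import asyncio
from typing import List, Optional


class ChatService:
    def __init__(self, llm_client: LLMClient = None):
        self.llm_client = llm_client or LLMClient()

    async def get_ai_reply(self, messages: List[Message], user_api_key: Optional[str] = None) -> str:
        # user_api_key is still accepted but, as of this commit, unused.
        reply = await self.llm_client.generate_reply(messages)
        return reply


async def main() -> None:
    service = ChatService()
    reply = await service.get_ai_reply([Message(role="user", content="Hello")])
    print(reply)


asyncio.run(main())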
