This repository was archived by the owner on Dec 7, 2025. It is now read-only.
2 files changed: +2 −6 lines changed

@@ -15,21 +15,17 @@ class LLMClient:
     def __init__(self, settings=None):
         self.settings = settings or get_settings()
 
-    async def generate_reply(self, messages: List[Message], user_api_key: Optional[str] = None) -> str:
+    async def generate_reply(self, messages: List[Message]) -> str:
         payload = {
             "model": self.settings.chat_model,
             "messages": [m.dict() for m in messages],
         }
 
-        # Use the client's API key if one was passed in, otherwise the default from settings
-        api_key = user_api_key or self.settings.openai_api_key
-
         try:
             async with httpx.AsyncClient(timeout=15.0) as client:
                 response = await client.post(
                     f"{self.settings.llm_api_url}/chat/completions",
                     json=payload,
-                    # headers={"Authorization": f"Bearer {api_key}"}
                 )
                 response.raise_for_status()
                 data = response.json()

@@ -11,5 +11,5 @@ def __init__(self, llm_client: LLMClient = None):
         self.llm_client = llm_client or LLMClient()
 
     async def get_ai_reply(self, messages: List[Message], user_api_key: Optional[str] = None) -> str:
-        reply = await self.llm_client.generate_reply(messages, user_api_key)
+        reply = await self.llm_client.generate_reply(messages)
         return reply
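Note that get_ai_reply keeps its user_api_key parameter even though it is no longer forwarded, so existing callers continue to work but the key is silently ignored downstream. A minimal sketch of a service-level call after this change; the ChatService class name and import path are assumptions, as they are not shown in the diff.

import asyncio

# Hypothetical import paths -- the service class name is not shown in the diff.
from app.chat_service import ChatService
from app.schemas import Message

async def demo_service() -> None:
    service = ChatService()  # builds a default LLMClient internally
    reply = await service.get_ai_reply(
        [Message(role="user", content="Ping?")],
        user_api_key="sk-user-key",  # hypothetical value; accepted but no longer forwarded
    )
    print(reply)

asyncio.run(demo_service())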