# Default values from environment (can be overridden by settings).
# An empty string means "not configured" for key/custom-API values.
_DEFAULT_OPENROUTER_KEY = os.environ.get("OPENROUTER_API_KEY", "")
_DEFAULT_OLLAMA_HOST = os.environ.get("OLLAMA_HOST", "http://localhost:11434")
# Custom API: any OpenAI-compatible endpoint (vLLM, LocalAI, LM Studio, ...).
_DEFAULT_CUSTOM_API_BASE_URL = os.environ.get("CUSTOM_API_BASE_URL", "")
_DEFAULT_CUSTOM_API_KEY = os.environ.get("CUSTOM_API_KEY", "")
_DEFAULT_CUSTOM_API_MODEL = os.environ.get("CUSTOM_API_MODEL", "")
3437
class LLMService:
    """Service facade over multiple LLM backends: Ollama, OpenRouter, and
    custom OpenAI-compatible APIs."""

    # Configurable settings (can be updated at runtime)
    OLLAMA_BASE_URL = _DEFAULT_OLLAMA_HOST
    OPENROUTER_API_KEY = _DEFAULT_OPENROUTER_KEY
    OPENROUTER_BASE_URL = "https://openrouter.ai/api/v1"
    # Custom API settings (OpenAI-compatible endpoints like vLLM, LocalAI, LM Studio)
    # An empty string disables the custom backend.
    CUSTOM_API_BASE_URL = _DEFAULT_CUSTOM_API_BASE_URL
    CUSTOM_API_KEY = _DEFAULT_CUSTOM_API_KEY
    CUSTOM_API_MODEL = _DEFAULT_CUSTOM_API_MODEL
4047
4148 @classmethod
42- def update_settings (cls , ollama_host : str = None , openrouter_api_key : str = None ):
49+ def update_settings (cls , ollama_host : str = None , openrouter_api_key : str = None ,
50+ custom_api_base_url : str = None , custom_api_key : str = None ,
51+ custom_api_model : str = None ):
4352 """Update LLM service settings at runtime."""
4453 if ollama_host is not None :
4554 cls .OLLAMA_BASE_URL = ollama_host if ollama_host else _DEFAULT_OLLAMA_HOST
4655 logger .info (f"[LLM] Updated Ollama host: { cls .OLLAMA_BASE_URL } " )
4756 if openrouter_api_key is not None :
4857 cls .OPENROUTER_API_KEY = openrouter_api_key
4958 logger .info (f"[LLM] Updated OpenRouter API key: { '***' + openrouter_api_key [- 4 :] if openrouter_api_key else '(empty)' } " )
59+ if custom_api_base_url is not None :
60+ cls .CUSTOM_API_BASE_URL = custom_api_base_url
61+ logger .info (f"[LLM] Updated Custom API base URL: { cls .CUSTOM_API_BASE_URL or '(empty)' } " )
62+ if custom_api_key is not None :
63+ cls .CUSTOM_API_KEY = custom_api_key
64+ logger .info (f"[LLM] Updated Custom API key: { '***' + custom_api_key [- 4 :] if custom_api_key else '(empty)' } " )
65+ if custom_api_model is not None :
66+ cls .CUSTOM_API_MODEL = custom_api_model
67+ logger .info (f"[LLM] Updated Custom API model: { cls .CUSTOM_API_MODEL or '(empty)' } " )
5068
5169 @classmethod
5270 def get_settings (cls ) -> dict :
5371 """Get current LLM service settings."""
5472 return {
5573 "ollama_host" : cls .OLLAMA_BASE_URL ,
56- "openrouter_api_key" : cls .OPENROUTER_API_KEY
74+ "openrouter_api_key" : cls .OPENROUTER_API_KEY ,
75+ "custom_api_base_url" : cls .CUSTOM_API_BASE_URL ,
76+ "custom_api_key" : cls .CUSTOM_API_KEY ,
77+ "custom_api_model" : cls .CUSTOM_API_MODEL
5778 }
5879
5980 @classmethod
@@ -70,6 +91,11 @@ def check_openrouter_available(cls) -> bool:
7091 """Check if OpenRouter API key is set."""
7192 return bool (cls .OPENROUTER_API_KEY )
7293
94+ @classmethod
95+ def check_custom_api_available (cls ) -> bool :
96+ """Check if Custom API is configured (base URL and model are required)."""
97+ return bool (cls .CUSTOM_API_BASE_URL and cls .CUSTOM_API_MODEL )
98+
7399 @classmethod
74100 def get_models (cls ) -> List [dict ]:
75101 """Returns available models from both Ollama and OpenRouter."""
@@ -99,6 +125,14 @@ def get_models(cls) -> List[dict]:
99125 ]
100126 models .extend (openrouter_models )
101127
128+ # Add Custom API model if configured
129+ if cls .CUSTOM_API_BASE_URL and cls .CUSTOM_API_MODEL :
130+ models .append ({
131+ "id" : cls .CUSTOM_API_MODEL ,
132+ "name" : f"Custom: { cls .CUSTOM_API_MODEL } " ,
133+ "provider" : "custom"
134+ })
135+
102136 return models
103137
104138 @staticmethod
@@ -154,11 +188,41 @@ def _call_openrouter(cls, model: str, prompt: str, temperature: float = 0.7) ->
154188 return data ["choices" ][0 ]["message" ]["content" ]
155189 raise Exception (f"OpenRouter Error: { resp .status_code } - { resp .text } " )
156190
191+ @classmethod
192+ def _call_custom_api (cls , model : str , prompt : str , temperature : float = 0.7 ) -> str :
193+ """Call a custom OpenAI-compatible API (vLLM, LocalAI, LM Studio, etc.)."""
194+ headers = {"Content-Type" : "application/json" }
195+ if cls .CUSTOM_API_KEY :
196+ headers ["Authorization" ] = f"Bearer { cls .CUSTOM_API_KEY } "
197+
198+ payload = {
199+ "model" : model ,
200+ "messages" : [{"role" : "user" , "content" : prompt }],
201+ "temperature" : temperature
202+ }
203+
204+ # Ensure base URL doesn't have trailing slash
205+ base_url = cls .CUSTOM_API_BASE_URL .rstrip ('/' )
206+
207+ resp = requests .post (
208+ f"{ base_url } /chat/completions" ,
209+ headers = headers ,
210+ json = payload ,
211+ timeout = 60
212+ )
213+
214+ if resp .status_code == 200 :
215+ data = resp .json ()
216+ return data ["choices" ][0 ]["message" ]["content" ]
217+ raise Exception (f"Custom API Error: { resp .status_code } - { resp .text } " )
218+
157219 @classmethod
158220 def _call_llm (cls , model : str , prompt : str , provider : str = "ollama" , json_mode : bool = False , temperature : float = 0.7 ) -> str :
159221 """Unified LLM call that routes to appropriate provider."""
160222 if provider == "openrouter" :
161223 return cls ._call_openrouter (model , prompt , temperature )
224+ elif provider == "custom" :
225+ return cls ._call_custom_api (model , prompt , temperature )
162226 else :
163227 return cls ._call_ollama (model , prompt , json_mode , temperature )
164228
0 commit comments