Skip to content

Commit 5c3e404

Browse files
authored
Merge pull request #24 from sd3420625/main
feat: Add custom API support for OpenAI-compatible endpoints
2 parents 6b6fcad + e607197 commit 5c3e404

File tree

7 files changed

+236
-18
lines changed

7 files changed

+236
-18
lines changed

backend/.env.example

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,12 @@
22
# Get your key at https://openrouter.ai/
33
OPENROUTER_API_KEY=your_openrouter_api_key_here
44

5+
# ===== Custom API (OpenAI Compatible) =====
6+
# For connecting to vLLM, LocalAI, LM Studio, or any OpenAI-compatible endpoint
7+
# CUSTOM_API_BASE_URL=http://localhost:8080/v1
8+
# CUSTOM_API_KEY=your_api_key_here
9+
# CUSTOM_API_MODEL=your-model-name
10+
511
# ===== GPU Configuration =====
612
# Set to "true", "false", or "auto" (default: auto)
713
# HEARTMULA_4BIT=auto

backend/app/main.py

Lines changed: 35 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -651,15 +651,22 @@ async def reload_models(settings: GPUSettingsRequest, background_tasks: Backgrou
651651
def get_llm_settings():
652652
"""Get current LLM provider settings."""
653653
settings = LLMService.get_settings()
654-
# Mask API key for security (show only last 4 chars)
655-
api_key = settings.get("openrouter_api_key", "")
656-
masked_key = f"***{api_key[-4:]}" if api_key and len(api_key) > 4 else ""
654+
# Mask API keys for security (show only last 4 chars)
655+
openrouter_key = settings.get("openrouter_api_key", "")
656+
masked_openrouter_key = f"***{openrouter_key[-4:]}" if openrouter_key and len(openrouter_key) > 4 else ""
657+
658+
custom_api_key = settings.get("custom_api_key", "")
659+
masked_custom_key = f"***{custom_api_key[-4:]}" if custom_api_key and len(custom_api_key) > 4 else ""
657660

658661
return {
659662
"ollama_host": settings.get("ollama_host", ""),
660-
"openrouter_api_key": masked_key,
663+
"openrouter_api_key": masked_openrouter_key,
661664
"ollama_available": LLMService.check_ollama_available(),
662-
"openrouter_available": LLMService.check_openrouter_available()
665+
"openrouter_available": LLMService.check_openrouter_available(),
666+
"custom_api_base_url": settings.get("custom_api_base_url", ""),
667+
"custom_api_key": masked_custom_key,
668+
"custom_api_model": settings.get("custom_api_model", ""),
669+
"custom_api_available": LLMService.check_custom_api_available()
663670
}
664671

665672

@@ -675,19 +682,38 @@ def update_llm_settings(settings: LLMSettingsRequest):
675682
LLMService.update_settings(openrouter_api_key=settings.openrouter_api_key)
676683
music_service.current_settings["openrouter_api_key"] = settings.openrouter_api_key
677684

685+
if settings.custom_api_base_url is not None:
686+
LLMService.update_settings(custom_api_base_url=settings.custom_api_base_url)
687+
music_service.current_settings["custom_api_base_url"] = settings.custom_api_base_url
688+
689+
if settings.custom_api_key is not None:
690+
LLMService.update_settings(custom_api_key=settings.custom_api_key)
691+
music_service.current_settings["custom_api_key"] = settings.custom_api_key
692+
693+
if settings.custom_api_model is not None:
694+
LLMService.update_settings(custom_api_model=settings.custom_api_model)
695+
music_service.current_settings["custom_api_model"] = settings.custom_api_model
696+
678697
# Save to persistent storage
679698
music_service._save_settings()
680699

681700
# Return updated settings
682701
current = LLMService.get_settings()
683-
api_key = current.get("openrouter_api_key", "")
684-
masked_key = f"***{api_key[-4:]}" if api_key and len(api_key) > 4 else ""
702+
openrouter_key = current.get("openrouter_api_key", "")
703+
masked_openrouter_key = f"***{openrouter_key[-4:]}" if openrouter_key and len(openrouter_key) > 4 else ""
704+
705+
custom_api_key = current.get("custom_api_key", "")
706+
masked_custom_key = f"***{custom_api_key[-4:]}" if custom_api_key and len(custom_api_key) > 4 else ""
685707

686708
return {
687709
"ollama_host": current.get("ollama_host", ""),
688-
"openrouter_api_key": masked_key,
710+
"openrouter_api_key": masked_openrouter_key,
689711
"ollama_available": LLMService.check_ollama_available(),
690-
"openrouter_available": LLMService.check_openrouter_available()
712+
"openrouter_available": LLMService.check_openrouter_available(),
713+
"custom_api_base_url": current.get("custom_api_base_url", ""),
714+
"custom_api_key": masked_custom_key,
715+
"custom_api_model": current.get("custom_api_model", ""),
716+
"custom_api_available": LLMService.check_custom_api_available()
691717
}
692718

693719

backend/app/models.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -155,10 +155,17 @@ class ModelReloadResponse(SQLModel):
155155
class LLMSettingsRequest(SQLModel):
156156
ollama_host: Optional[str] = None
157157
openrouter_api_key: Optional[str] = None
158+
custom_api_base_url: Optional[str] = None
159+
custom_api_key: Optional[str] = None
160+
custom_api_model: Optional[str] = None
158161

159162

160163
class LLMSettingsResponse(SQLModel):
161164
ollama_host: str
162165
openrouter_api_key: str # Will be masked in response
163166
ollama_available: bool
164167
openrouter_available: bool
168+
custom_api_base_url: str
169+
custom_api_key: str # Will be masked in response
170+
custom_api_model: str
171+
custom_api_available: bool

backend/app/services/llm_service.py

Lines changed: 66 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -31,29 +31,50 @@
3131
# Default values from environment (can be overridden by settings)
3232
_DEFAULT_OPENROUTER_KEY = os.environ.get("OPENROUTER_API_KEY", "")
3333
_DEFAULT_OLLAMA_HOST = os.environ.get("OLLAMA_HOST", "http://localhost:11434")
34+
_DEFAULT_CUSTOM_API_BASE_URL = os.environ.get("CUSTOM_API_BASE_URL", "")
35+
_DEFAULT_CUSTOM_API_KEY = os.environ.get("CUSTOM_API_KEY", "")
36+
_DEFAULT_CUSTOM_API_MODEL = os.environ.get("CUSTOM_API_MODEL", "")
3437

3538
class LLMService:
3639
# Configurable settings (can be updated at runtime)
3740
OLLAMA_BASE_URL = _DEFAULT_OLLAMA_HOST
3841
OPENROUTER_API_KEY = _DEFAULT_OPENROUTER_KEY
3942
OPENROUTER_BASE_URL = "https://openrouter.ai/api/v1"
43+
# Custom API settings (OpenAI-compatible endpoints like vLLM, LocalAI, LM Studio)
44+
CUSTOM_API_BASE_URL = _DEFAULT_CUSTOM_API_BASE_URL
45+
CUSTOM_API_KEY = _DEFAULT_CUSTOM_API_KEY
46+
CUSTOM_API_MODEL = _DEFAULT_CUSTOM_API_MODEL
4047

4148
@classmethod
42-
def update_settings(cls, ollama_host: str = None, openrouter_api_key: str = None):
49+
def update_settings(cls, ollama_host: str = None, openrouter_api_key: str = None,
50+
custom_api_base_url: str = None, custom_api_key: str = None,
51+
custom_api_model: str = None):
4352
"""Update LLM service settings at runtime."""
4453
if ollama_host is not None:
4554
cls.OLLAMA_BASE_URL = ollama_host if ollama_host else _DEFAULT_OLLAMA_HOST
4655
logger.info(f"[LLM] Updated Ollama host: {cls.OLLAMA_BASE_URL}")
4756
if openrouter_api_key is not None:
4857
cls.OPENROUTER_API_KEY = openrouter_api_key
4958
logger.info(f"[LLM] Updated OpenRouter API key: {'***' + openrouter_api_key[-4:] if openrouter_api_key else '(empty)'}")
59+
if custom_api_base_url is not None:
60+
cls.CUSTOM_API_BASE_URL = custom_api_base_url
61+
logger.info(f"[LLM] Updated Custom API base URL: {cls.CUSTOM_API_BASE_URL or '(empty)'}")
62+
if custom_api_key is not None:
63+
cls.CUSTOM_API_KEY = custom_api_key
64+
logger.info(f"[LLM] Updated Custom API key: {'***' + custom_api_key[-4:] if custom_api_key else '(empty)'}")
65+
if custom_api_model is not None:
66+
cls.CUSTOM_API_MODEL = custom_api_model
67+
logger.info(f"[LLM] Updated Custom API model: {cls.CUSTOM_API_MODEL or '(empty)'}")
5068

5169
@classmethod
5270
def get_settings(cls) -> dict:
5371
"""Get current LLM service settings."""
5472
return {
5573
"ollama_host": cls.OLLAMA_BASE_URL,
56-
"openrouter_api_key": cls.OPENROUTER_API_KEY
74+
"openrouter_api_key": cls.OPENROUTER_API_KEY,
75+
"custom_api_base_url": cls.CUSTOM_API_BASE_URL,
76+
"custom_api_key": cls.CUSTOM_API_KEY,
77+
"custom_api_model": cls.CUSTOM_API_MODEL
5778
}
5879

5980
@classmethod
@@ -70,6 +91,11 @@ def check_openrouter_available(cls) -> bool:
7091
"""Check if OpenRouter API key is set."""
7192
return bool(cls.OPENROUTER_API_KEY)
7293

94+
@classmethod
95+
def check_custom_api_available(cls) -> bool:
96+
"""Check if Custom API is configured (base URL and model are required)."""
97+
return bool(cls.CUSTOM_API_BASE_URL and cls.CUSTOM_API_MODEL)
98+
7399
@classmethod
74100
def get_models(cls) -> List[dict]:
75101
"""Returns available models from both Ollama and OpenRouter."""
@@ -99,6 +125,14 @@ def get_models(cls) -> List[dict]:
99125
]
100126
models.extend(openrouter_models)
101127

128+
# Add Custom API model if configured
129+
if cls.CUSTOM_API_BASE_URL and cls.CUSTOM_API_MODEL:
130+
models.append({
131+
"id": cls.CUSTOM_API_MODEL,
132+
"name": f"Custom: {cls.CUSTOM_API_MODEL}",
133+
"provider": "custom"
134+
})
135+
102136
return models
103137

104138
@staticmethod
@@ -154,11 +188,41 @@ def _call_openrouter(cls, model: str, prompt: str, temperature: float = 0.7) ->
154188
return data["choices"][0]["message"]["content"]
155189
raise Exception(f"OpenRouter Error: {resp.status_code} - {resp.text}")
156190

191+
@classmethod
192+
def _call_custom_api(cls, model: str, prompt: str, temperature: float = 0.7) -> str:
193+
"""Call a custom OpenAI-compatible API (vLLM, LocalAI, LM Studio, etc.)."""
194+
headers = {"Content-Type": "application/json"}
195+
if cls.CUSTOM_API_KEY:
196+
headers["Authorization"] = f"Bearer {cls.CUSTOM_API_KEY}"
197+
198+
payload = {
199+
"model": model,
200+
"messages": [{"role": "user", "content": prompt}],
201+
"temperature": temperature
202+
}
203+
204+
# Ensure base URL doesn't have trailing slash
205+
base_url = cls.CUSTOM_API_BASE_URL.rstrip('/')
206+
207+
resp = requests.post(
208+
f"{base_url}/chat/completions",
209+
headers=headers,
210+
json=payload,
211+
timeout=60
212+
)
213+
214+
if resp.status_code == 200:
215+
data = resp.json()
216+
return data["choices"][0]["message"]["content"]
217+
raise Exception(f"Custom API Error: {resp.status_code} - {resp.text}")
218+
157219
@classmethod
158220
def _call_llm(cls, model: str, prompt: str, provider: str = "ollama", json_mode: bool = False, temperature: float = 0.7) -> str:
159221
"""Unified LLM call that routes to appropriate provider."""
160222
if provider == "openrouter":
161223
return cls._call_openrouter(model, prompt, temperature)
224+
elif provider == "custom":
225+
return cls._call_custom_api(model, prompt, temperature)
162226
else:
163227
return cls._call_ollama(model, prompt, json_mode, temperature)
164228

backend/app/services/music_service.py

Lines changed: 12 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -755,7 +755,11 @@ def __new__(cls):
755755
"torch_compile_mode": "default",
756756
# LLM Provider settings
757757
"ollama_host": "",
758-
"openrouter_api_key": ""
758+
"openrouter_api_key": "",
759+
# Custom API settings
760+
"custom_api_base_url": "",
761+
"custom_api_key": "",
762+
"custom_api_model": ""
759763
}
760764
# Load persisted settings from disk (overrides defaults)
761765
cls._instance._load_settings()
@@ -806,6 +810,13 @@ def _load_settings(self):
806810
LLMService.update_settings(ollama_host=saved["ollama_host"])
807811
if "openrouter_api_key" in saved and saved["openrouter_api_key"]:
808812
LLMService.update_settings(openrouter_api_key=saved["openrouter_api_key"])
813+
# Apply Custom API settings
814+
if "custom_api_base_url" in saved:
815+
LLMService.update_settings(custom_api_base_url=saved["custom_api_base_url"])
816+
if "custom_api_key" in saved:
817+
LLMService.update_settings(custom_api_key=saved["custom_api_key"])
818+
if "custom_api_model" in saved:
819+
LLMService.update_settings(custom_api_model=saved["custom_api_model"])
809820

810821
logger.info(f"[Settings] Loaded from {SETTINGS_FILE}: {self.current_settings}")
811822
except Exception as e:

frontend/src/api.ts

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ export interface Job {
2121
export interface LLMModel {
2222
id: string;
2323
name: string;
24-
provider: 'ollama' | 'openrouter';
24+
provider: 'ollama' | 'openrouter' | 'custom';
2525
}
2626

2727
export interface Playlist {
@@ -77,6 +77,10 @@ export interface LLMSettings {
7777
openrouter_api_key: string;
7878
ollama_available: boolean;
7979
openrouter_available: boolean;
80+
custom_api_base_url: string;
81+
custom_api_key: string;
82+
custom_api_model: string;
83+
custom_api_available: boolean;
8084
}
8185

8286
export const api = {
@@ -352,7 +356,13 @@ export const api = {
352356
return res.data;
353357
},
354358

355-
updateLLMSettings: async (settings: { ollama_host?: string; openrouter_api_key?: string }): Promise<LLMSettings> => {
359+
updateLLMSettings: async (settings: {
360+
ollama_host?: string;
361+
openrouter_api_key?: string;
362+
custom_api_base_url?: string;
363+
custom_api_key?: string;
364+
custom_api_model?: string;
365+
}): Promise<LLMSettings> => {
356366
const res = await axios.put(`${API_BASE_URL}/settings/llm`, settings);
357367
return res.data;
358368
}

0 commit comments

Comments (0)