Skip to content

Commit df6e6c9

Browse files
authored
Merge pull request #152 from ian-morgan99/copilot/fix-openwebui-endpoint-issue
Fix OpenWebUI endpoint to use /api instead of /v1
2 parents 744e7de + 8a11be9 commit df6e6c9

File tree

2 files changed

+190
-3
lines changed

2 files changed

+190
-3
lines changed

ha_sentry/rootfs/app/ai_client.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -84,10 +84,10 @@ def _initialize_client(self):
8484
timeout=timeout
8585
)
8686
elif self.config.ai_provider == 'openwebui':
87-
# OpenWebUI with compatible endpoint
87+
# OpenWebUI uses /api/chat/completions endpoint
8888
base_url = self.config.ai_endpoint
89-
if not base_url.endswith('/v1'):
90-
base_url = f"{base_url}/v1"
89+
if not base_url.endswith('/api'):
90+
base_url = f"{base_url}/api"
9191
logger.debug(f"Configuring OpenWebUI client with base_url: {base_url}")
9292
self.client = OpenAI(
9393
base_url=base_url,
Lines changed: 187 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,187 @@
1+
"""
2+
Test AI provider endpoint URL construction
3+
Verifies that each AI provider uses the correct base URL format
4+
"""
5+
import sys
6+
import os
7+
8+
# Add the app directory to Python path
9+
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'ha_sentry', 'rootfs', 'app'))
10+
11+
from config_manager import ConfigManager
12+
from ai_client import AIClient
13+
14+
15+
def test_ollama_endpoint_construction():
    """Verify the Ollama provider gets /v1 appended to its configured endpoint.

    Returns True on success; also returns True (as a soft skip) when the
    AI client could not be initialized at all.
    """
    os.environ['AI_ENABLED'] = 'true'
    os.environ['AI_PROVIDER'] = 'ollama'
    os.environ['AI_ENDPOINT'] = 'http://localhost:11434'
    os.environ['AI_MODEL'] = 'llama2'

    cfg = ConfigManager()
    ai = AIClient(cfg)

    # Soft-skip when the OpenAI library is unavailable and no client was built.
    if not ai.client:
        print("⚠ Warning: AI client not initialized (OpenAI library may not be available)")
        return True

    # The OpenAI client's base_url should end with /v1 (may carry a trailing slash).
    assert ai.client.base_url is not None
    url = str(ai.client.base_url).rstrip('/')
    assert url.endswith('/v1'), f"Ollama base_url should end with /v1, got: {url}"
    print("✓ Test passed: Ollama endpoint has /v1 appended")
    return True
36+
37+
38+
def test_lmstudio_endpoint_construction():
    """Verify the LMStudio provider gets /v1 appended to its configured endpoint.

    Returns True on success; also returns True (as a soft skip) when the
    AI client could not be initialized at all.
    """
    os.environ['AI_ENABLED'] = 'true'
    os.environ['AI_PROVIDER'] = 'lmstudio'
    os.environ['AI_ENDPOINT'] = 'http://localhost:1234'
    os.environ['AI_MODEL'] = 'local-model'

    cfg = ConfigManager()
    ai = AIClient(cfg)

    # Soft-skip when the OpenAI library is unavailable and no client was built.
    if not ai.client:
        print("⚠ Warning: AI client not initialized (OpenAI library may not be available)")
        return True

    # The OpenAI client's base_url should end with /v1 (may carry a trailing slash).
    assert ai.client.base_url is not None
    url = str(ai.client.base_url).rstrip('/')
    assert url.endswith('/v1'), f"LMStudio base_url should end with /v1, got: {url}"
    print("✓ Test passed: LMStudio endpoint has /v1 appended")
    return True
59+
60+
61+
def test_openwebui_endpoint_construction():
    """Verify the OpenWebUI provider gets /api appended — and never /v1.

    Returns True on success; also returns True (as a soft skip) when the
    AI client could not be initialized at all.
    """
    os.environ['AI_ENABLED'] = 'true'
    os.environ['AI_PROVIDER'] = 'openwebui'
    os.environ['AI_ENDPOINT'] = 'http://localhost:8080'
    os.environ['AI_MODEL'] = 'gpt-3.5-turbo'
    os.environ['API_KEY'] = 'test-key'

    cfg = ConfigManager()
    ai = AIClient(cfg)

    # Soft-skip when the OpenAI library is unavailable and no client was built.
    if not ai.client:
        print("⚠ Warning: AI client not initialized (OpenAI library may not be available)")
        return True

    # OpenWebUI must use /api (trailing slash tolerated), and explicitly NOT /v1 —
    # this pins the regression the commit fixes.
    assert ai.client.base_url is not None
    url = str(ai.client.base_url).rstrip('/')
    assert url.endswith('/api'), f"OpenWebUI base_url should end with /api, got: {url}"
    assert not url.endswith('/v1'), f"OpenWebUI base_url should NOT end with /v1, got: {url}"
    print("✓ Test passed: OpenWebUI endpoint has /api appended (not /v1)")
    return True
84+
85+
86+
def test_openai_endpoint_no_modification():
    """Verify the OpenAI provider leaves an empty endpoint untouched (library default).

    Returns True on success; also returns True (as a soft skip) when the
    AI client could not be initialized at all.
    """
    os.environ['AI_ENABLED'] = 'true'
    os.environ['AI_PROVIDER'] = 'openai'
    os.environ['AI_ENDPOINT'] = ''  # Default OpenAI endpoint
    os.environ['AI_MODEL'] = 'gpt-4'
    os.environ['API_KEY'] = 'test-key'

    cfg = ConfigManager()
    ai = AIClient(cfg)

    # Soft-skip when the OpenAI library is unavailable and no client was built.
    if not ai.client:
        print("⚠ Warning: AI client not initialized (OpenAI library may not be available)")
        return True

    # With no custom endpoint configured, the client should fall back to the
    # library's default base_url; initialization succeeding is the check here.
    print("✓ Test passed: OpenAI uses default endpoint when not specified")
    return True
105+
106+
107+
def test_openwebui_endpoint_already_has_api():
    """Verify /api is not appended twice when the user already configured it.

    Returns True on success; also returns True (as a soft skip) when the
    AI client could not be initialized at all.
    """
    os.environ['AI_ENABLED'] = 'true'
    os.environ['AI_PROVIDER'] = 'openwebui'
    os.environ['AI_ENDPOINT'] = 'http://localhost:8080/api'
    os.environ['AI_MODEL'] = 'gpt-3.5-turbo'
    os.environ['API_KEY'] = 'test-key'

    cfg = ConfigManager()
    ai = AIClient(cfg)

    # Soft-skip when the OpenAI library is unavailable and no client was built.
    if not ai.client:
        print("⚠ Warning: AI client not initialized (OpenAI library may not be available)")
        return True

    # Must end with a single /api — never a duplicated /api/api.
    assert ai.client.base_url is not None
    url = str(ai.client.base_url).rstrip('/')
    assert url.endswith('/api'), f"OpenWebUI base_url should end with /api, got: {url}"
    assert '/api/api' not in url, f"OpenWebUI base_url should not have duplicate /api, got: {url}"
    print("✓ Test passed: OpenWebUI does not duplicate /api if already present")
    return True
130+
131+
132+
def test_ollama_endpoint_already_has_v1():
    """Verify /v1 is not appended twice when the user already configured it.

    Returns True on success; also returns True (as a soft skip) when the
    AI client could not be initialized at all.
    """
    os.environ['AI_ENABLED'] = 'true'
    os.environ['AI_PROVIDER'] = 'ollama'
    os.environ['AI_ENDPOINT'] = 'http://localhost:11434/v1'
    os.environ['AI_MODEL'] = 'llama2'

    cfg = ConfigManager()
    ai = AIClient(cfg)

    # Soft-skip when the OpenAI library is unavailable and no client was built.
    if not ai.client:
        print("⚠ Warning: AI client not initialized (OpenAI library may not be available)")
        return True

    # Must end with a single /v1 — never a duplicated /v1/v1.
    assert ai.client.base_url is not None
    url = str(ai.client.base_url).rstrip('/')
    assert url.endswith('/v1'), f"Ollama base_url should end with /v1, got: {url}"
    assert '/v1/v1' not in url, f"Ollama base_url should not have duplicate /v1, got: {url}"
    print("✓ Test passed: Ollama does not duplicate /v1 if already present")
    return True
154+
155+
156+
if __name__ == '__main__':
    # Simple self-contained runner: executes every test, tallies results,
    # and exits non-zero when anything failed (CI-friendly).
    print("Running AI provider endpoint URL construction tests...\n")

    tests = [
        test_ollama_endpoint_construction,
        test_lmstudio_endpoint_construction,
        test_openwebui_endpoint_construction,
        test_openai_endpoint_no_modification,
        test_openwebui_endpoint_already_has_api,
        test_ollama_endpoint_already_has_v1,
    ]

    passed = 0
    failed = 0

    for case in tests:
        try:
            ok = case()
        except Exception as e:
            # A raised assertion (or any other error) counts as a failure;
            # print the traceback so the cause is visible in the log.
            print(f"✗ Test {case.__name__} failed with exception: {e}")
            import traceback
            traceback.print_exc()
            failed += 1
        else:
            if ok:
                passed += 1
            else:
                failed += 1

    print(f"\n{'='*50}")
    print(f"Tests completed: {passed} passed, {failed} failed")
    print(f"{'='*50}")

    sys.exit(0 if failed == 0 else 1)

0 commit comments

Comments
 (0)