Skip to content

Commit 3a93ddc

Browse files
committed
Fixed missing new-model dependency: switched to provider-specific API environment variables (OPENAI_*, ANTHROPIC_*, GEMINI_*, GROQ_*, MISTRAL_*, HUGGING_FACE_*) and made LLM_BASE_URL overridable in compose.yml.
1 parent e83d8b4 commit 3a93ddc

File tree

2 files changed

+38
-16
lines changed

2 files changed

+38
-16
lines changed

compose.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,7 @@ services:
6161
- "PORT=9017"
6262
- "MCP_URL=http://repository-manager-mcp:8017/mcp"
6363
- "PROVIDER=openai"
64-
- "LLM_BASE_URL=http://host.docker.internal:1234/v1"
64+
- "LLM_BASE_URL=${LLM_BASE_URL:-http://host.docker.internal:1234/v1}"
6565
- "LLM_API_KEY=llama"
6666
- "MODEL_ID=${MODEL_ID:-qwen/qwen3-coder-next}"
6767
- "DEBUG=False"

repository_manager/utils.py

Lines changed: 37 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -279,7 +279,23 @@ def create_model(
279279
api_key: Optional[str] = None,
280280
ssl_verify: bool = True,
281281
):
282-
http_client = get_http_client(ssl_verify=ssl_verify)
282+
"""
283+
Create a Pydantic AI model with the specified provider and configuration.
284+
285+
Args:
286+
provider: The model provider (openai, anthropic, google, groq, mistral, huggingface, ollama)
287+
model_id: The specific model ID to use
288+
base_url: Optional base URL for the API
289+
api_key: Optional API key
290+
ssl_verify: Whether to verify SSL certificates (default: True)
291+
292+
Returns:
293+
A Pydantic AI Model instance
294+
"""
295+
# Create a custom HTTP client if SSL verification is disabled
296+
http_client = None
297+
if not ssl_verify:
298+
http_client = httpx.AsyncClient(verify=False)
283299

284300
if provider == "openai":
285301
target_base_url = base_url
@@ -288,18 +304,18 @@ def create_model(
288304
# If a custom HTTP client is available, build an explicit AsyncOpenAI client and provider object instead of relying on environment variables.
289305
if http_client and AsyncOpenAI and OpenAIProvider:
290306
client = AsyncOpenAI(
291-
api_key=target_api_key or os.environ.get("LLM_API_KEY"),
292-
base_url=target_base_url or os.environ.get("LLM_BASE_URL"),
307+
api_key=target_api_key or os.environ.get("OPENAI_API_KEY"),
308+
base_url=target_base_url or os.environ.get("OPENAI_BASE_URL"),
293309
http_client=http_client,
294310
)
295311
provider_instance = OpenAIProvider(openai_client=client)
296312
return OpenAIChatModel(model_name=model_id, provider=provider_instance)
297313

298314
# Fallback to standard env vars
299315
if target_base_url:
300-
os.environ["LLM_BASE_URL"] = target_base_url
316+
os.environ["OPENAI_BASE_URL"] = target_base_url
301317
if target_api_key:
302-
os.environ["LLM_API_KEY"] = target_api_key
318+
os.environ["OPENAI_API_KEY"] = target_api_key
303319
return OpenAIChatModel(model_name=model_id, provider="openai")
304320

305321
elif provider == "ollama":
@@ -316,18 +332,21 @@ def create_model(
316332
provider_instance = OpenAIProvider(openai_client=client)
317333
return OpenAIChatModel(model_name=model_id, provider=provider_instance)
318334

319-
os.environ["LLM_BASE_URL"] = target_base_url
320-
os.environ["LLM_API_KEY"] = target_api_key
335+
os.environ["OPENAI_BASE_URL"] = target_base_url
336+
os.environ["OPENAI_API_KEY"] = target_api_key
321337
return OpenAIChatModel(model_name=model_id, provider="openai")
322338

323339
elif provider == "anthropic":
324340
if api_key:
325-
os.environ["LLM_API_KEY"] = api_key
341+
os.environ["ANTHROPIC_API_KEY"] = api_key
342+
343+
# AnthropicModel supports http_client directly via some paths,
344+
# but pydantic-ai might prefer we pass the client to the provider or use a custom client
326345

327346
try:
328347
if http_client and AsyncAnthropic and AnthropicProvider:
329348
client = AsyncAnthropic(
330-
api_key=api_key or os.environ.get("LLM_API_KEY"),
349+
api_key=api_key or os.environ.get("ANTHROPIC_API_KEY"),
331350
http_client=http_client,
332351
)
333352
provider_instance = AnthropicProvider(anthropic_client=client)
@@ -338,18 +357,18 @@ def create_model(
338357
return AnthropicModel(model_name=model_id)
339358

340359
elif provider == "google":
341-
# Google generic setup, skipping complex SSL for now as agreed
342360
if api_key:
343-
os.environ["LLM_API_KEY"] = api_key
361+
os.environ["GEMINI_API_KEY"] = api_key
362+
# Disabling SSL verification is non-trivial with the genai client; deferred until specifically requested and researched.
344363
return GoogleModel(model_name=model_id)
345364

346365
elif provider == "groq":
347366
if api_key:
348-
os.environ["LLM_API_KEY"] = api_key
367+
os.environ["GROQ_API_KEY"] = api_key
349368

350369
if http_client and AsyncGroq and GroqProvider:
351370
client = AsyncGroq(
352-
api_key=api_key or os.environ.get("LLM_API_KEY"),
371+
api_key=api_key or os.environ.get("GROQ_API_KEY"),
353372
http_client=http_client,
354373
)
355374
provider_instance = GroqProvider(groq_client=client)
@@ -359,18 +378,21 @@ def create_model(
359378

360379
elif provider == "mistral":
361380
if api_key:
362-
os.environ["LLM_API_KEY"] = api_key
381+
os.environ["MISTRAL_API_KEY"] = api_key
363382

364383
if http_client and Mistral and MistralProvider:
365384
# Assumes MistralProvider accepts a mistral_client argument — TODO confirm against the Mistral SDK.
366385
# This follows the standard provider pattern but has not been verified against the SDK.
367386
pass
387+
# client = Mistral(...) - Mistral SDK might be different
388+
# Skipping Mistral custom client for now to avoid breaking without verification
389+
# If user needs Mistral SSL disable, we'll need to research Mistral SDK + Provider
368390

369391
return MistralModel(model_name=model_id)
370392

371393
elif provider == "huggingface":
372394
if api_key:
373-
os.environ["LLM_API_KEY"] = api_key
395+
os.environ["HUGGING_FACE_API_KEY"] = api_key
374396
return HuggingFaceModel(model_name=model_id)
375397

376398
return OpenAIChatModel(model_name=model_id, provider="openai")

0 commit comments

Comments
 (0)