Skip to content

Commit 78f1a2d

Browse files
tanbro and claude committed
fix: revert incorrect base_url fallback logic that breaks task execution
PR xorbitsai#138 (cee1e49) introduced a provider compatibility system that incorrectly overrode user-configured base URLs with default values. The problematic code used: base_url=model.base_url or default_base_url_for_provider(...) This fails when base_url is an empty string ("") because Python's `or` operator treats empty strings as falsy, causing the fallback to activate even when the user explicitly configured a base URL. For OpenAI-compatible providers like DashScope that store base_url="" in the database, this caused requests to be sent to api.openai.com instead of their configured endpoints. As a result, task execution failed to start completely - tasks showed blank responses with no backend execution activity. This fix reverts to direct provider string comparison and removes the fallback logic, trusting the database configuration as-is. Fixes task execution regression introduced in cee1e49 (PR xorbitsai#138) Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
1 parent cee1e49 commit 78f1a2d

File tree

2 files changed

+14
-28
lines changed

2 files changed

+14
-28
lines changed

src/xagent/core/model/chat/basic/adapter.py

Lines changed: 12 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,6 @@
11
import os
22

33
from ....model import ChatModelConfig, ModelConfig
4-
from ....model.providers import (
5-
default_base_url_for_provider,
6-
provider_compatibility_for_provider,
7-
)
84
from ....retry import create_retry_wrapper
95
from ..error import retry_on
106
from .azure_openai import AzureOpenAILLM
@@ -23,25 +19,11 @@ def create_base_llm(model: ModelConfig) -> BaseLLM:
2319
if not isinstance(model, ChatModelConfig):
2420
raise TypeError(f"Invalid model type: {type(model).__name__}")
2521

26-
compatibility = provider_compatibility_for_provider(model.model_provider)
27-
28-
if compatibility == "openai_compatible":
22+
if model.model_provider == "openai":
2923
llm: BaseLLM = OpenAILLM(
3024
model_name=model.model_name,
3125
api_key=model.api_key,
32-
base_url=model.base_url
33-
or default_base_url_for_provider(model.model_provider),
34-
default_temperature=model.default_temperature,
35-
default_max_tokens=model.default_max_tokens,
36-
timeout=model.timeout,
37-
abilities=model.abilities,
38-
)
39-
elif compatibility == "claude_compatible":
40-
llm = ClaudeLLM(
41-
model_name=model.model_name,
42-
api_key=model.api_key,
43-
base_url=model.base_url
44-
or default_base_url_for_provider(model.model_provider),
26+
base_url=model.base_url,
4527
default_temperature=model.default_temperature,
4628
default_max_tokens=model.default_max_tokens,
4729
timeout=model.timeout,
@@ -78,6 +60,16 @@ def create_base_llm(model: ModelConfig) -> BaseLLM:
7860
timeout=model.timeout,
7961
abilities=model.abilities,
8062
)
63+
elif model.model_provider == "claude":
64+
llm = ClaudeLLM(
65+
model_name=model.model_name,
66+
api_key=model.api_key,
67+
base_url=model.base_url,
68+
default_temperature=model.default_temperature,
69+
default_max_tokens=model.default_max_tokens,
70+
timeout=model.timeout,
71+
abilities=model.abilities,
72+
)
8173
elif model.model_provider == "xinference":
8274
llm = XinferenceLLM(
8375
model_name=model.model_name,

src/xagent/core/model/chat/langchain.py

Lines changed: 2 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -10,10 +10,6 @@
1010
from langchain_openai import AzureChatOpenAI, ChatOpenAI
1111

1212
from ...model import ChatModelConfig, ModelConfig
13-
from ...model.providers import (
14-
default_base_url_for_provider,
15-
provider_compatibility_for_provider,
16-
)
1713
from ...retry import ExponentialBackoff, RetryStrategy, create_retry_wrapper
1814
from .error import retry_on
1915

@@ -101,16 +97,14 @@ def create_base_chat_model(
10197
raise TypeError(f"Unsupported Chat model type: {type(model).__name__}")
10298

10399
temp = temperature if temperature is not None else model.default_temperature
104-
compatibility = provider_compatibility_for_provider(model.model_provider)
105100

106-
if compatibility == "openai_compatible":
101+
if model.model_provider == "openai":
107102
return ChatOpenAI(
108103
model=model.model_name,
109104
temperature=temp,
110105
max_tokens=model.default_max_tokens,
111106
api_key=model.api_key,
112-
base_url=model.base_url
113-
or default_base_url_for_provider(model.model_provider),
107+
base_url=model.base_url,
114108
timeout=model.timeout,
115109
)
116110
elif model.model_provider == "zhipu":

0 commit comments

Comments (0)