
Commit 4226314

Authored by Copilot and ishaan-jaff
Add native Responses API support for litellm_proxy provider (#15347)
* Initial plan

* Add native Responses API support for litellm_proxy provider

Co-authored-by: ishaan-jaff <[email protected]>

---------

Co-authored-by: copilot-swe-agent[bot] <[email protected]>
Co-authored-by: ishaan-jaff <[email protected]>
1 parent 2f42c80 commit 4226314

File tree

5 files changed: +158 -0 lines changed


litellm/__init__.py

Lines changed: 3 additions & 0 deletions
@@ -1190,6 +1190,9 @@ def add_known_models():
 from .llms.azure.responses.o_series_transformation import (
     AzureOpenAIOSeriesResponsesAPIConfig,
 )
+from .llms.litellm_proxy.responses.transformation import (
+    LiteLLMProxyResponsesAPIConfig,
+)
 from .llms.openai.chat.o_series_transformation import (
     OpenAIOSeriesConfig as OpenAIO1Config,  # maintain backwards compatibility
     OpenAIOSeriesConfig,
litellm/llms/litellm_proxy/responses/transformation.py

Lines changed: 48 additions & 0 deletions

@@ -0,0 +1,48 @@
+"""
+Responses API transformation for LiteLLM Proxy provider.
+
+LiteLLM Proxy supports the OpenAI Responses API natively when the underlying model supports it.
+This config enables pass-through behavior to the proxy's /v1/responses endpoint.
+"""
+
+from typing import Optional
+
+from litellm.llms.openai.responses.transformation import OpenAIResponsesAPIConfig
+from litellm.secret_managers.main import get_secret_str
+from litellm.types.utils import LlmProviders
+
+
+class LiteLLMProxyResponsesAPIConfig(OpenAIResponsesAPIConfig):
+    """
+    Configuration for LiteLLM Proxy Responses API support.
+
+    Extends OpenAI's config since the proxy follows OpenAI's API spec,
+    but uses LITELLM_PROXY_API_BASE for the base URL.
+    """
+
+    @property
+    def custom_llm_provider(self) -> LlmProviders:
+        return LlmProviders.LITELLM_PROXY
+
+    def get_complete_url(
+        self,
+        api_base: Optional[str],
+        litellm_params: dict,
+    ) -> str:
+        """
+        Get the endpoint for LiteLLM Proxy responses API.
+
+        Uses LITELLM_PROXY_API_BASE environment variable if api_base is not provided.
+        """
+        api_base = api_base or get_secret_str("LITELLM_PROXY_API_BASE")
+
+        if api_base is None:
+            raise ValueError(
+                "api_base not set for LiteLLM Proxy responses API. "
+                "Set via api_base parameter or LITELLM_PROXY_API_BASE environment variable"
+            )
+
+        # Remove trailing slashes
+        api_base = api_base.rstrip("/")
+
+        return f"{api_base}/responses"
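
With this config in place, a Responses API call using the litellm_proxy/ model prefix should pass through to the proxy's /responses endpoint. A minimal usage sketch (the proxy URL, model name, and API key below are placeholder assumptions, not values from this commit):

import litellm

# Route a Responses API call through a LiteLLM Proxy deployment.
# api_base may be omitted if LITELLM_PROXY_API_BASE is set in the environment.
response = litellm.responses(
    model="litellm_proxy/gpt-4",              # provider prefix selects this config
    input="Say hello through the proxy.",
    api_base="https://my-proxy.example.com",  # placeholder proxy URL
    api_key="sk-my-proxy-key",                # placeholder proxy key
)
print(response)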

litellm/utils.py

Lines changed: 2 additions & 0 deletions
@@ -7305,6 +7305,8 @@ def get_provider_responses_api_config(
                 return litellm.AzureOpenAIOSeriesResponsesAPIConfig()
             else:
                 return litellm.AzureOpenAIResponsesAPIConfig()
+        elif litellm.LlmProviders.LITELLM_PROXY == provider:
+            return litellm.LiteLLMProxyResponsesAPIConfig()
         return None

     @staticmethod
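
A short sketch of how this registry branch is consumed at runtime, mirroring the flow the tests below exercise: resolve the provider's Responses API config, then let it build the endpoint URL (the api_base value is a placeholder):

from litellm.types.utils import LlmProviders
from litellm.utils import ProviderConfigManager

# Resolve the Responses API config registered for the litellm_proxy provider ...
config = ProviderConfigManager.get_provider_responses_api_config(
    model="litellm_proxy/gpt-4",
    provider=LlmProviders.LITELLM_PROXY,
)

# ... and use it to build the complete /responses endpoint URL.
url = config.get_complete_url(
    api_base="https://my-proxy.example.com",  # placeholder proxy URL
    litellm_params={},
)
assert url == "https://my-proxy.example.com/responses"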

tests/local_testing/test_config.py

Lines changed: 21 additions & 0 deletions
@@ -401,3 +401,24 @@ def test_provider_config_manager_bedrock_converse_like():
 # model="gpt-3.5-turbo", provider=LlmProviders(provider)
 # )
 # _check_provider_config(config, provider)
+
+
+def test_litellm_proxy_responses_api_config():
+    """Test that litellm_proxy provider returns correct Responses API config"""
+    from litellm.llms.litellm_proxy.responses.transformation import (
+        LiteLLMProxyResponsesAPIConfig,
+    )
+
+    config = ProviderConfigManager.get_provider_responses_api_config(
+        model="litellm_proxy/gpt-4",
+        provider=LlmProviders.LITELLM_PROXY,
+    )
+    print(f"config: {config}")
+    assert config is not None, "Config should not be None for litellm_proxy provider"
+    assert isinstance(
+        config, LiteLLMProxyResponsesAPIConfig
+    ), f"Expected LiteLLMProxyResponsesAPIConfig, got {type(config)}"
+    assert (
+        config.custom_llm_provider == LlmProviders.LITELLM_PROXY
+    ), "custom_llm_provider should be LITELLM_PROXY"
+
Lines changed: 84 additions & 0 deletions

@@ -0,0 +1,84 @@
+"""
+Unit test for LiteLLM Proxy Responses API configuration.
+"""
+
+import pytest
+
+from litellm.types.utils import LlmProviders
+from litellm.utils import ProviderConfigManager
+
+
+def test_litellm_proxy_responses_api_config():
+    """Test that litellm_proxy provider returns correct Responses API config"""
+    from litellm.llms.litellm_proxy.responses.transformation import (
+        LiteLLMProxyResponsesAPIConfig,
+    )
+
+    config = ProviderConfigManager.get_provider_responses_api_config(
+        model="litellm_proxy/gpt-4",
+        provider=LlmProviders.LITELLM_PROXY,
+    )
+    print(f"config: {config}")
+    assert config is not None, "Config should not be None for litellm_proxy provider"
+    assert isinstance(
+        config, LiteLLMProxyResponsesAPIConfig
+    ), f"Expected LiteLLMProxyResponsesAPIConfig, got {type(config)}"
+    assert (
+        config.custom_llm_provider == LlmProviders.LITELLM_PROXY
+    ), "custom_llm_provider should be LITELLM_PROXY"
+
+
+def test_litellm_proxy_responses_api_config_get_complete_url():
+    """Test that get_complete_url works correctly"""
+    import os
+    from litellm.llms.litellm_proxy.responses.transformation import (
+        LiteLLMProxyResponsesAPIConfig,
+    )
+
+    config = LiteLLMProxyResponsesAPIConfig()
+
+    # Test with explicit api_base
+    url = config.get_complete_url(
+        api_base="https://my-proxy.example.com",
+        litellm_params={},
+    )
+    assert url == "https://my-proxy.example.com/responses"
+
+    # Test with trailing slash
+    url = config.get_complete_url(
+        api_base="https://my-proxy.example.com/",
+        litellm_params={},
+    )
+    assert url == "https://my-proxy.example.com/responses"
+
+    # Test that it raises error when api_base is None and env var is not set
+    if "LITELLM_PROXY_API_BASE" in os.environ:
+        del os.environ["LITELLM_PROXY_API_BASE"]
+
+    with pytest.raises(ValueError, match="api_base not set"):
+        config.get_complete_url(api_base=None, litellm_params={})
+
+
+def test_litellm_proxy_responses_api_config_inherits_from_openai():
+    """Test that LiteLLMProxyResponsesAPIConfig extends OpenAI config properly"""
+    from litellm.llms.litellm_proxy.responses.transformation import (
+        LiteLLMProxyResponsesAPIConfig,
+    )
+    from litellm.llms.openai.responses.transformation import (
+        OpenAIResponsesAPIConfig,
+    )
+
+    config = LiteLLMProxyResponsesAPIConfig()
+
+    # Should inherit from OpenAI config
+    assert isinstance(config, OpenAIResponsesAPIConfig)
+
+    # Should have the correct provider set
+    assert config.custom_llm_provider == LlmProviders.LITELLM_PROXY
+
+
+if __name__ == "__main__":
+    test_litellm_proxy_responses_api_config()
+    test_litellm_proxy_responses_api_config_get_complete_url()
+    test_litellm_proxy_responses_api_config_inherits_from_openai()
+    print("All tests passed!")