Skip to content

Commit 67d47cd

Browse files
committed
chore: pr feedback
Signed-off-by: Ion Koutsouris <[email protected]>
1 parent bb4508b commit 67d47cd

File tree

11 files changed

+4294
-4408
lines changed

11 files changed

+4294
-4408
lines changed
Lines changed: 31 additions & 112 deletions
Original file line numberDiff line numberDiff line change
@@ -1,30 +1,25 @@
1-
import logging
21
from typing import Any
32

43
try:
54
from fastapi import APIRouter, HTTPException
65
from fastapi.responses import StreamingResponse
7-
from openai.types import ErrorObject
86
from openai.types.chat.chat_completion import ChatCompletion
97
from openai.types.model import Model
10-
from openai.types.responses import Response
8+
from openai.types.responses import Response as OpenAIResponse
119
except ImportError as _import_error: # pragma: no cover
1210
raise ImportError(
13-
'Please install the `openai` package to enable the fastapi openai compatible endpoint, '
14-
'you can use the `openai` and `fastapi` optional group — `pip install "pydantic-ai-slim[openai,fastapi]"`'
11+
'Please install the `openai` and `fastapi` packages to enable the fastapi openai compatible endpoint, '
12+
'you can use the `chat-completion` optional group — `pip install "pydantic-ai-slim[chat-completion]"`'
1513
) from _import_error
1614

1715
from pydantic_ai.fastapi.api import AgentChatCompletionsAPI, AgentModelsAPI, AgentResponsesAPI
1816
from pydantic_ai.fastapi.data_models import (
1917
ChatCompletionRequest,
20-
ErrorResponse,
2118
ModelsResponse,
2219
ResponsesRequest,
2320
)
2421
from pydantic_ai.fastapi.registry import AgentRegistry
2522

26-
logger = logging.getLogger(__name__)
27-
2823

2924
class AgentAPIRouter(APIRouter):
3025
"""FastAPI Router for Pydantic Agent."""
@@ -34,8 +29,8 @@ def __init__(
3429
agent_registry: AgentRegistry,
3530
disable_response_api: bool = False,
3631
disable_completions_api: bool = False,
37-
*args: tuple[Any],
38-
**kwargs: tuple[Any],
32+
*args: Any,
33+
**kwargs: Any,
3934
):
4035
super().__init__(*args, **kwargs)
4136
self.registry = agent_registry
@@ -48,118 +43,42 @@ def __init__(
4843
# Registers OpenAI/v1 API routes
4944
self._register_routes()
5045

51-
def _register_routes(self) -> None: # noqa: C901
46+
def _register_routes(self) -> None:
5247
if self.enable_completions_api:
5348

54-
@self.post(
55-
'/v1/chat/completions',
56-
response_model=ChatCompletion,
57-
)
58-
async def chat_completions( # type: ignore
49+
@self.post('/v1/chat/completions', response_model=ChatCompletion)
50+
async def chat_completions(
5951
request: ChatCompletionRequest,
6052
) -> ChatCompletion | StreamingResponse:
61-
if not request.messages:
62-
raise HTTPException(
63-
status_code=400,
64-
detail=ErrorResponse(
65-
error=ErrorObject(
66-
type='invalid_request_error',
67-
message='Messages cannot be empty',
68-
),
69-
).model_dump(),
70-
)
71-
try:
72-
if getattr(request, 'stream', False):
73-
return StreamingResponse(
74-
self.completions_api.create_streaming_completion(request),
75-
media_type='text/event-stream',
76-
headers={
77-
'Cache-Control': 'no-cache',
78-
'Connection': 'keep-alive',
79-
'Content-Type': 'text/plain; charset=utf-8',
80-
},
81-
)
82-
else:
83-
return await self.completions_api.create_completion(request)
84-
except Exception as e:
85-
logger.error(f'Error in chat completion: {e}', exc_info=True)
86-
raise HTTPException(
87-
status_code=500,
88-
detail=ErrorResponse(
89-
error=ErrorObject(
90-
type='internal_server_error',
91-
message=str(e),
92-
),
93-
).model_dump(),
53+
if getattr(request, 'stream', False):
54+
return StreamingResponse(
55+
self.completions_api.create_streaming_completion(request),
56+
media_type='text/event-stream',
57+
headers={
58+
'Cache-Control': 'no-cache',
59+
'Connection': 'keep-alive',
60+
'Content-Type': 'text/plain; charset=utf-8',
61+
},
9462
)
63+
else:
64+
return await self.completions_api.create_completion(request)
9565

9666
if self.enable_responses_api:
9767

98-
@self.post(
99-
'/v1/responses',
100-
response_model=Response,
101-
)
102-
async def responses( # type: ignore
68+
@self.post('/v1/responses', response_model=OpenAIResponse)
69+
async def responses(
10370
request: ResponsesRequest,
104-
) -> Response:
105-
if not request.input:
106-
raise HTTPException(
107-
status_code=400,
108-
detail=ErrorResponse(
109-
error=ErrorObject(
110-
type='invalid_request_error',
111-
message='Messages cannot be empty',
112-
),
113-
).model_dump(),
114-
)
115-
try:
116-
if getattr(request, 'stream', False):
117-
# TODO: add streaming support for responses api
118-
raise HTTPException(status_code=501)
119-
else:
120-
return await self.responses_api.create_response(request)
121-
except Exception as e:
122-
logger.error(f'Error in responses: {e}', exc_info=True)
123-
raise HTTPException(
124-
status_code=500,
125-
detail=ErrorResponse(
126-
error=ErrorObject(
127-
type='internal_server_error',
128-
message=str(e),
129-
),
130-
).model_dump(),
131-
)
71+
) -> OpenAIResponse:
72+
if getattr(request, 'stream', False):
73+
# TODO: add streaming support for responses api
74+
raise HTTPException(status_code=501)
75+
else:
76+
return await self.responses_api.create_response(request)
13277

13378
@self.get('/v1/models', response_model=ModelsResponse)
134-
async def get_models() -> ModelsResponse: # type: ignore
135-
try:
136-
return await self.models_api.list_models()
137-
except Exception as e:
138-
logger.error(f'Error listing models: {e}', exc_info=True)
139-
raise HTTPException(
140-
status_code=500,
141-
detail=ErrorResponse(
142-
error=ErrorObject(
143-
type='internal_server_error',
144-
message=f'Error retrieving models: {str(e)}',
145-
),
146-
).model_dump(),
147-
)
79+
async def get_models() -> ModelsResponse:
80+
return await self.models_api.list_models()
14881

14982
@self.get('/v1/models' + '/{model_id}', response_model=Model)
150-
async def get_model(model_id: str) -> Model: # type: ignore
151-
try:
152-
return await self.models_api.get_model(model_id)
153-
except HTTPException:
154-
raise
155-
except Exception as e:
156-
logger.error(f'Error fetching model info: {e}', exc_info=True)
157-
raise HTTPException(
158-
status_code=500,
159-
detail=ErrorResponse(
160-
error=ErrorObject(
161-
type='internal_server_error',
162-
message=f'Error retrieving model: {str(e)}',
163-
),
164-
).model_dump(),
165-
)
83+
async def get_model(model_id: str) -> Model:
84+
return await self.models_api.get_model(model_id)

pydantic_ai_slim/pydantic_ai/fastapi/api/completions.py

Lines changed: 12 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
11
import json
2-
import logging
32
import time
43
from collections.abc import AsyncGenerator
54
from typing import Any
@@ -11,8 +10,8 @@
1110
from openai.types.chat.chat_completion_chunk import ChatCompletionChunk, Choice as Chunkhoice, ChoiceDelta
1211
except ImportError as _import_error: # pragma: no cover
1312
raise ImportError(
14-
'Please install the `openai` package to enable the fastapi openai compatible endpoint, '
15-
'you can use the `openai` and `fastapi` optional group — `pip install "pydantic-ai-slim[openai,fastapi]"`'
13+
'Please install the `openai` and `fastapi` packages to enable the fastapi openai compatible endpoint, '
14+
'you can use the `chat-completion` optional group — `pip install "pydantic-ai-slim[chat-completion]"`'
1615
) from _import_error
1716

1817
from pydantic import TypeAdapter
@@ -26,8 +25,6 @@
2625
from pydantic_ai.fastapi.registry import AgentRegistry
2726
from pydantic_ai.settings import ModelSettings
2827

29-
logger = logging.getLogger(__name__)
30-
3128

3229
class AgentChatCompletionsAPI:
3330
"""Chat completions API openai <-> pydantic-ai conversion."""
@@ -60,23 +57,18 @@ async def create_completion(self, request: ChatCompletionRequest) -> ChatComplet
6057
model_settings_ta = TypeAdapter(ModelSettings)
6158
messages = openai_chat_completions_2pai(messages=request.messages)
6259

63-
try:
64-
async with agent:
65-
result = await agent.run(
66-
message_history=messages,
67-
model_settings=model_settings_ta.validate_python(
68-
{k: v for k, v in request.model_dump().items() if v is not None},
69-
),
70-
)
71-
72-
return pai_result_to_openai_completions(
73-
result=result,
74-
model=model_name,
60+
async with agent:
61+
result = await agent.run(
62+
message_history=messages,
63+
model_settings=model_settings_ta.validate_python(
64+
{k: v for k, v in request.model_dump().items() if v is not None},
65+
),
7566
)
7667

77-
except Exception as e:
78-
logger.error(f'Error creating completion: {e}')
79-
raise
68+
return pai_result_to_openai_completions(
69+
result=result,
70+
model=model_name,
71+
)
8072

8173
async def create_streaming_completion(self, request: ChatCompletionRequest) -> AsyncGenerator[str]:
8274
"""Create a streaming chat completion."""

pydantic_ai_slim/pydantic_ai/fastapi/api/models.py

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
import logging
21
import time
32

43
try:
@@ -7,8 +6,8 @@
76
from openai.types.model import Model
87
except ImportError as _import_error: # pragma: no cover
98
raise ImportError(
10-
'Please install the `openai` package to enable the fastapi openai compatible endpoint, '
11-
'you can use the `openai` and `fastapi` optional group — `pip install "pydantic-ai-slim[openai,fastapi]"`'
9+
'Please install the `openai` and `fastapi` packages to enable the fastapi openai compatible endpoint, '
10+
'you can use the `chat-completion` optional group — `pip install "pydantic-ai-slim[chat-completion]"`'
1211
) from _import_error
1312

1413
from pydantic_ai.fastapi.data_models import (
@@ -17,8 +16,6 @@
1716
)
1817
from pydantic_ai.fastapi.registry import AgentRegistry
1918

20-
logger = logging.getLogger(__name__)
21-
2219

2320
class AgentModelsAPI:
2421
"""Models API for pydantic-ai agents."""

pydantic_ai_slim/pydantic_ai/fastapi/api/responses.py

Lines changed: 10 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
import logging
21
from collections.abc import AsyncGenerator
32

43
try:
@@ -7,8 +6,8 @@
76
from openai.types.responses import Response
87
except ImportError as _import_error: # pragma: no cover
98
raise ImportError(
10-
'Please install the `openai` package to enable the fastapi openai compatible endpoint, '
11-
'you can use the `openai` and `fastapi` optional group — `pip install "pydantic-ai-slim[openai,fastapi]"`'
9+
'Please install the `openai` and `fastapi` packages to enable the fastapi openai compatible endpoint, '
10+
'you can use the `chat-completion` optional group — `pip install "pydantic-ai-slim[chat-completion]"`'
1211
) from _import_error
1312

1413
from pydantic_ai import Agent
@@ -22,8 +21,6 @@
2221
OpenAIResponsesModelSettings,
2322
)
2423

25-
logger = logging.getLogger(__name__)
26-
2724

2825
class AgentResponsesAPI:
2926
"""Responses API openai <-> pydantic-ai conversion."""
@@ -56,19 +53,15 @@ async def create_response(self, request: ResponsesRequest) -> Response:
5653
model_settings = OpenAIResponsesModelSettings(openai_previous_response_id='auto')
5754
messages = openai_responses_input_to_pai(items=request.input)
5855

59-
try:
60-
async with agent:
61-
result = await agent.run(
62-
message_history=messages,
63-
model_settings=model_settings,
64-
)
65-
return pai_result_to_openai_responses(
66-
result=result,
67-
model=model_name,
56+
async with agent:
57+
result = await agent.run(
58+
message_history=messages,
59+
model_settings=model_settings,
6860
)
69-
except Exception as e:
70-
logger.error(f'Error creating completion: {e}')
71-
raise
61+
return pai_result_to_openai_responses(
62+
result=result,
63+
model=model_name,
64+
)
7265

7366
async def create_streaming_response(self, request: ResponsesRequest) -> AsyncGenerator[str]:
7467
"""Create a streaming chat completion."""

pydantic_ai_slim/pydantic_ai/fastapi/convert/convert_messages.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,8 +21,8 @@
2121
from openai.types.responses.response_usage import InputTokensDetails, OutputTokensDetails
2222
except ImportError as _import_error: # pragma: no cover
2323
raise ImportError(
24-
'Please install the `openai` package to enable the fastapi openai compatible endpoint, '
25-
'you can use the `openai` and `fastapi` optional group — `pip install "pydantic-ai-slim[openai,fastapi]"`'
24+
'Please install the `openai` and `fastapi` packages to enable the fastapi openai compatible endpoint, '
25+
'you can use the `chat-completion` optional group — `pip install "pydantic-ai-slim[chat-completion]"`'
2626
) from _import_error
2727

2828
from pydantic import TypeAdapter

pydantic_ai_slim/pydantic_ai/fastapi/data_models/models.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,8 +22,8 @@
2222
)
2323
except ImportError as _import_error: # pragma: no cover
2424
raise ImportError(
25-
'Please install the `openai` package to enable the fastapi openai compatible endpoint, '
26-
'you can use the `openai` and `fastapi` optional group — `pip install "pydantic-ai-slim[openai,fastapi]"`'
25+
'Please install the `openai` and `fastapi` packages to enable the fastapi openai compatible endpoint, '
26+
'you can use the `chat-completion` optional group — `pip install "pydantic-ai-slim[chat-completion]"`'
2727
) from _import_error
2828
from pydantic import BaseModel, ConfigDict, Field
2929

pydantic_ai_slim/pyproject.toml

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,6 @@ dependencies = [
6868
logfire = ["logfire[httpx]>=3.14.1"]
6969
# Models
7070
openai = ["openai>=1.107.2"]
71-
fastapi = ["fastapi"]
7271
cohere = ["cohere>=5.18.0; platform_system != 'Emscripten'"]
7372
vertexai = ["google-auth>=2.36.0", "requests>=2.32.2"]
7473
google = ["google-genai>=1.46.0"]
@@ -112,6 +111,8 @@ temporal = ["temporalio==1.18.0"]
112111
dbos = ["dbos>=1.14.0"]
113112
# Prefect
114113
prefect = ["prefect>=3.4.21"]
114+
# Chat completions/responses endpoint
115+
chat-completion = ["openai>=1.107.2", "fastapi"]
115116

116117
[tool.hatch.metadata]
117118
allow-direct-references = true

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@ requires-python = ">=3.10"
4646

4747
[tool.hatch.metadata.hooks.uv-dynamic-versioning]
4848
dependencies = [
49-
"pydantic-ai-slim[openai,vertexai,google,groq,anthropic,mistral,cohere,bedrock,huggingface,cli,mcp,fastmcp,evals,ag-ui,retries,temporal,logfire,ui,fastapi]=={{ version }}",
49+
"pydantic-ai-slim[openai,vertexai,google,groq,anthropic,mistral,cohere,bedrock,huggingface,cli,mcp,fastmcp,evals,ag-ui,retries,temporal,logfire,ui,chat-completion]=={{ version }}",
5050
]
5151

5252
[tool.hatch.metadata.hooks.uv-dynamic-versioning.optional-dependencies]

0 commit comments

Comments (0)