|
1 | 1 | from __future__ import annotations as _annotations |
2 | 2 |
|
3 | | -from typing import Literal |
4 | | - |
5 | | -import fastapi |
6 | | -import httpx |
7 | 3 | import logfire |
8 | | -from fastapi import Request, Response |
9 | | -from fastapi.responses import HTMLResponse |
10 | | -from pydantic import BaseModel |
11 | | -from pydantic.alias_generators import to_camel |
12 | 4 | from pydantic_ai.builtin_tools import ( |
13 | | - AbstractBuiltinTool, |
14 | 5 | CodeExecutionTool, |
15 | 6 | ImageGenerationTool, |
16 | 7 | WebSearchTool, |
17 | 8 | ) |
18 | | -from pydantic_ai.ui.vercel_ai import VercelAIAdapter |
19 | 9 |
|
20 | 10 | from .agent import agent |
21 | 11 |
|
# Observability setup (runs at import time, before the app is built).
# 'if-token-present' means nothing will be sent (and the example will work) if you don't have logfire configured
logfire.configure(send_to_logfire='if-token-present')
# Instruments pydantic-ai so agent runs/model calls are traced by logfire.
logfire.instrument_pydantic_ai()
|
26 | | -app = fastapi.FastAPI() |
27 | | -logfire.instrument_fastapi(app) |
28 | | - |
29 | | - |
30 | | -@app.options('/api/chat') |
31 | | -def options_chat(): |
32 | | - pass |
33 | | - |
34 | | - |
35 | | -AIModelID = Literal[ |
36 | | - 'anthropic:claude-sonnet-4-5', |
37 | | - 'openai-responses:gpt-5', |
38 | | - 'google-gla:gemini-2.5-pro', |
39 | | -] |
40 | | -BuiltinToolID = Literal['web_search', 'image_generation', 'code_execution'] |
41 | | - |
42 | | - |
43 | | -class AIModel(BaseModel): |
44 | | - id: AIModelID |
45 | | - name: str |
46 | | - builtin_tools: list[BuiltinToolID] |
47 | | - |
48 | | - |
49 | | -class BuiltinTool(BaseModel): |
50 | | - id: BuiltinToolID |
51 | | - name: str |
52 | | - |
53 | | - |
54 | | -BUILTIN_TOOL_DEFS: list[BuiltinTool] = [ |
55 | | - BuiltinTool(id='web_search', name='Web Search'), |
56 | | - BuiltinTool(id='code_execution', name='Code Execution'), |
57 | | - BuiltinTool(id='image_generation', name='Image Generation'), |
58 | | -] |
59 | | - |
60 | | -BUILTIN_TOOLS: dict[BuiltinToolID, AbstractBuiltinTool] = { |
61 | | - 'web_search': WebSearchTool(), |
62 | | - 'code_execution': CodeExecutionTool(), |
63 | | - 'image_generation': ImageGenerationTool(), |
64 | | -} |
65 | | - |
66 | | -AI_MODELS: list[AIModel] = [ |
67 | | - AIModel( |
68 | | - id='anthropic:claude-sonnet-4-5', |
69 | | - name='Claude Sonnet 4.5', |
70 | | - builtin_tools=[ |
71 | | - 'web_search', |
72 | | - 'code_execution', |
73 | | - ], |
74 | | - ), |
75 | | - AIModel( |
76 | | - id='openai-responses:gpt-5', |
77 | | - name='GPT 5', |
78 | | - builtin_tools=[ |
79 | | - 'web_search', |
80 | | - 'code_execution', |
81 | | - 'image_generation', |
82 | | - ], |
83 | | - ), |
84 | | - AIModel( |
85 | | - id='google-gla:gemini-2.5-pro', |
86 | | - name='Gemini 2.5 Pro', |
87 | | - builtin_tools=[ |
88 | | - 'web_search', |
89 | | - 'code_execution', |
90 | | - ], |
91 | | - ), |
92 | | -] |
93 | | - |
94 | | - |
95 | | -class ConfigureFrontend(BaseModel, alias_generator=to_camel, populate_by_name=True): |
96 | | - models: list[AIModel] |
97 | | - builtin_tools: list[BuiltinTool] |
98 | | - |
99 | | - |
100 | | -@app.get('/api/configure') |
101 | | -async def configure_frontend() -> ConfigureFrontend: |
102 | | - return ConfigureFrontend( |
103 | | - models=AI_MODELS, |
104 | | - builtin_tools=BUILTIN_TOOL_DEFS, |
105 | | - ) |
106 | | - |
107 | | - |
108 | | -class ChatRequestExtra(BaseModel, extra='ignore', alias_generator=to_camel): |
109 | | - model: AIModelID | None = None |
110 | | - builtin_tools: list[BuiltinToolID] = [] |
111 | | - |
112 | | - |
113 | | -@app.post('/api/chat') |
114 | | -async def post_chat(request: Request) -> Response: |
115 | | - run_input = VercelAIAdapter.build_run_input(await request.body()) |
116 | | - extra_data = ChatRequestExtra.model_validate(run_input.__pydantic_extra__) |
117 | | - return await VercelAIAdapter.dispatch_request( |
118 | | - request, |
119 | | - agent=agent, |
120 | | - model=extra_data.model, |
121 | | - builtin_tools=[BUILTIN_TOOLS[tool_id] for tool_id in extra_data.builtin_tools], |
122 | | - ) |
123 | | - |
124 | | - |
125 | | -@app.get('/') |
126 | | -@app.get('/{id}') |
127 | | -async def index(request: Request): |
128 | | - async with httpx.AsyncClient() as client: |
129 | | - response = await client.get( |
130 | | - 'https://cdn.jsdelivr.net/npm/@pydantic/ai-chat-ui@0.0.2/dist/index.html' |
131 | | - ) |
132 | | - return HTMLResponse(content=response.content, status_code=response.status_code) |
# Display-label -> model-identifier choices offered in the web UI.
# NOTE(review): assumes to_web() expects {label: model_id}; order here is the
# order presumably shown to the user — confirm against the to_web API docs.
WEB_MODELS = {
    'Claude Sonnet 4.5': 'anthropic:claude-sonnet-4-5',
    'GPT 5': 'openai-responses:gpt-5',
    'Gemini 2.5 Pro': 'google-gla:gemini-2.5-pro',
}

# Provider-side built-in tools the agent may use from the web UI.
WEB_BUILTIN_TOOLS = [
    WebSearchTool(),
    CodeExecutionTool(),
    ImageGenerationTool(),
]

# Build the chat web app from the agent, then instrument it.
# NOTE(review): to_web() appears to return a Starlette/ASGI app, given the
# instrument_starlette call below — confirm.
app = agent.to_web(models=WEB_MODELS, builtin_tools=WEB_BUILTIN_TOOLS)
logfire.instrument_starlette(app)
0 commit comments