Commit 617ea20

Merge pull request #28 from Azure-Samples/pydanticmcp
Add Pydantic + GitHub example
2 parents: 06d3763 + e3eb5b8

File tree

4 files changed (+122, −7 lines)

README.md

Lines changed: 3 additions & 3 deletions

@@ -23,9 +23,8 @@ This repository provides examples of many popular Python AI agent frameworks usi
   * [VS Code Dev Containers](#vs-code-dev-containers)
   * [Local environment](#local-environment)
 * [Running the Python examples](#running-the-python-examples)
-* [Guidance](#guidance)
-  * [Costs](#costs)
-  * [Security guidelines](#security-guidelines)
+* [Configuring GitHub Models](#configuring-github-models)
+* [Provisioning Azure AI resources](#provisioning-azure-ai-resources)
 * [Resources](#resources)

 ## Getting started

@@ -130,6 +129,7 @@ You can run the examples in this repository by executing the scripts in the `exa
 | [pydanticai_graph.py](examples/pydanticai_graph.py) | Uses PydanticAI with pydantic-graph to build a small question/answer evaluation graph. |
 | [pydanticai_tools.py](examples/pydanticai_tools.py) | Uses PydanticAI with multiple Python tools for weekend activity planning. |
 | [pydanticai_mcp_http.py](examples/pydanticai_mcp_http.py) | Uses PydanticAI with an MCP HTTP server toolset for travel planning (hotel search). |
+| [pydanticai_mcp_github.py](examples/pydanticai_mcp_github.py) | Uses PydanticAI with an MCP GitHub server toolset to triage repository issues. |

 ### Semantic Kernel

examples/langchainv1_mcp_github.py

Lines changed: 3 additions & 2 deletions

@@ -77,12 +77,13 @@ async def main():
     )

     tools = await mcp_client.get_tools()
-    tools = [t for t in tools if t.name in ("list_issues", "search_code", "search_issues", "search_pull_requests")]
+    desired_tool_names = ("list_issues", "search_code", "search_issues", "search_pull_requests")
+    filtered_tools = [t for t in tools if t.name in desired_tool_names]

     prompt_path = Path(__file__).parent / "triager.prompt.md"
     with prompt_path.open("r", encoding="utf-8") as f:
         prompt = f.read()
-    agent = create_agent(base_model, prompt=prompt, tools=tools, response_format=IssueProposal)
+    agent = create_agent(base_model, prompt=prompt, tools=filtered_tools, response_format=IssueProposal)

     user_content = "Find an issue from Azure-samples azure-search-openai-demo that can be closed."
     async for step in agent.astream({"messages": [HumanMessage(content=user_content)]}, stream_mode="updates"):
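
The rename also makes the filtering step easy to exercise on its own. A minimal standalone sketch of the same pattern (FakeTool is an invented stand-in for the tool objects the MCP adapter returns; it is not part of the commit):

# Sketch only: filter tool objects by name, as the diff above does.
from dataclasses import dataclass

@dataclass
class FakeTool:
    name: str

tools = [FakeTool("list_issues"), FakeTool("create_issue"), FakeTool("search_code")]
desired_tool_names = ("list_issues", "search_code", "search_issues", "search_pull_requests")
filtered_tools = [t for t in tools if t.name in desired_tool_names]
assert [t.name for t in filtered_tools] == ["list_issues", "search_code"]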

examples/pydanticai_mcp_github.py

Lines changed: 114 additions & 0 deletions

@@ -0,0 +1,114 @@
"""PydanticAI + GitHub MCP example.

This example creates an MCP server adapter that points at the GitHub MCP
endpoint, lists available tools, filters them to a small set useful for
triaging issues, and then sends those tools to a PydanticAI Agent which
produces a structured IssueProposal.

Prerequisites:
- Set GITHUB_TOKEN in your environment or in a .env file.
- The GitHub MCP endpoint must be reachable from your environment.

Usage:
    python examples/pydanticai_mcp_github.py
"""

import asyncio
import json
import logging
import os

import azure.identity
from dotenv import load_dotenv
from openai import AsyncAzureOpenAI, AsyncOpenAI
from pydantic import BaseModel, Field
from pydantic_ai import Agent, CallToolsNode, ModelRequestNode
from pydantic_ai.mcp import MCPServerStreamableHTTP
from pydantic_ai.messages import (
    ToolReturnPart,
)
from pydantic_ai.models.openai import OpenAIChatModel
from pydantic_ai.providers.openai import OpenAIProvider
from rich import print
from rich.logging import RichHandler

logging.basicConfig(level=logging.WARNING, format="%(message)s", datefmt="[%X]", handlers=[RichHandler()])
logger = logging.getLogger("pydanticai_mcp_github")


load_dotenv(override=True)
API_HOST = os.getenv("API_HOST", "github")


if API_HOST == "azure":
    token_provider = azure.identity.get_bearer_token_provider(
        azure.identity.DefaultAzureCredential(),
        "https://cognitiveservices.azure.com/.default",
    )
    client = AsyncAzureOpenAI(
        api_version=os.environ["AZURE_OPENAI_VERSION"],
        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
        azure_ad_token_provider=token_provider,
    )
    model = OpenAIChatModel(
        os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
        provider=OpenAIProvider(openai_client=client),
    )
elif API_HOST == "github":
    client = AsyncOpenAI(api_key=os.environ["GITHUB_TOKEN"], base_url="https://models.inference.ai.azure.com")
    model = OpenAIChatModel(
        os.environ.get("GITHUB_MODEL", "gpt-4o-mini"), provider=OpenAIProvider(openai_client=client)
    )
elif API_HOST == "ollama":
    client = AsyncOpenAI(base_url=os.environ["OLLAMA_ENDPOINT"], api_key="none")
    model = OpenAIChatModel(os.environ["OLLAMA_MODEL"], provider=OpenAIProvider(openai_client=client))
else:
    client = AsyncOpenAI()
    model = OpenAIChatModel(
        os.environ.get("OPENAI_MODEL", "gpt-4o-mini"), provider=OpenAIProvider(openai_client=client)
    )


class IssueProposal(BaseModel):
    """Structured proposal for closing an issue."""

    url: str = Field(description="URL of the issue")
    title: str = Field(description="Title of the issue")
    summary: str = Field(description="Brief summary of the issue and signals for closing")
    should_close: bool = Field(description="Whether the issue should be closed or not")
    reply_message: str = Field(description="Message to post when closing the issue, if applicable")


async def main():
    server = MCPServerStreamableHTTP(
        url="https://api.githubcopilot.com/mcp/", headers={"Authorization": f"Bearer {os.getenv('GITHUB_TOKEN', '')}"}
    )
    desired_tool_names = ("list_issues", "search_code", "search_issues", "search_pull_requests")
    filtered_tools = server.filtered(lambda ctx, tool_def: tool_def.name in desired_tool_names)

    agent: Agent[None, IssueProposal] = Agent(
        model,
        system_prompt=(
            "You are an issue triage assistant. Use the provided tools to find an issue that can be closed "
            "and produce an IssueProposal."
        ),
        output_type=IssueProposal,
        toolsets=[filtered_tools],
    )

    user_content = "Find an issue from Azure-samples azure-search-openai-demo that can be closed."
    async with agent.iter(user_content) as agent_run:
        async for node in agent_run:
            if isinstance(node, CallToolsNode):
                tool_call = node.model_response.parts[0]
                logger.info(f"Calling tool '{tool_call.tool_name}' with args:\n{tool_call.args}")
            elif isinstance(node, ModelRequestNode) and isinstance(node.request.parts[0], ToolReturnPart):
                tool_return_value = json.dumps(node.request.parts[0].content)
                logger.info(f"Got tool result:\n{tool_return_value[0:200]}...")

    print(agent_run.result.output)


if __name__ == "__main__":
    logger.setLevel(logging.INFO)
    asyncio.run(main())
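
The structured output here is a plain Pydantic model, so it can be exercised without running the agent at all. A minimal sketch (the sample field values are invented for illustration, not produced by the commit):

# Sketch only: validate an IssueProposal-shaped payload directly with Pydantic,
# reusing the model definition from the file above.
from pydantic import BaseModel, Field

class IssueProposal(BaseModel):
    url: str = Field(description="URL of the issue")
    title: str = Field(description="Title of the issue")
    summary: str = Field(description="Brief summary of the issue and signals for closing")
    should_close: bool = Field(description="Whether the issue should be closed or not")
    reply_message: str = Field(description="Message to post when closing the issue, if applicable")

proposal = IssueProposal.model_validate(
    {
        "url": "https://github.com/example/repo/issues/1",
        "title": "Example issue",
        "summary": "The reported bug was fixed in a later release.",
        "should_close": True,
        "reply_message": "Closing since this was fixed; please reopen if it persists.",
    }
)
print(proposal.model_dump_json(indent=2))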

requirements.txt

Lines changed: 2 additions & 2 deletions

@@ -13,9 +13,9 @@ azure-ai-inference==1.0.0b9
 openai-agents
 semantic-kernel
 langgraph
-langchain==1.0.0a4
+langchain==1.0.0a5
 langchain_openai
-pydantic-ai==1.0.0b1
+pydantic-ai==1.0.8
 llama-index
 llama-index-llms-azure-openai
 llama-index-llms-openai-like
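
An optional check (not part of the commit) to confirm the bumped pins are what's actually installed in the active environment:

# Print the installed versions of the two packages this diff bumps.
from importlib.metadata import version

for package in ("langchain", "pydantic-ai"):
    print(package, version(package))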
