"""PydanticAI + GitHub MCP example.

This example connects to the GitHub MCP endpoint over streamable HTTP,
filters the available tools down to a small set useful for triaging issues,
and hands those tools to a PydanticAI Agent that produces a structured
IssueProposal.

Prerequisites:
- Set GITHUB_TOKEN in your environment or in a .env file.
- The GitHub MCP endpoint must be reachable from your environment.

Usage:
    python examples/pydanticai_mcp_github.py
"""

import asyncio
import json
import logging
import os

import azure.identity
from dotenv import load_dotenv
from openai import AsyncAzureOpenAI, AsyncOpenAI
from pydantic import BaseModel, Field
from pydantic_ai import Agent, CallToolsNode, ModelRequestNode
from pydantic_ai.mcp import MCPServerStreamableHTTP
from pydantic_ai.messages import (
    ToolCallPart,
    ToolReturnPart,
)
from pydantic_ai.models.openai import OpenAIChatModel
from pydantic_ai.providers.openai import OpenAIProvider
from rich import print
from rich.logging import RichHandler

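# Send log output through Rich for colorized, timestamped console rendering.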
logging.basicConfig(level=logging.WARNING, format="%(message)s", datefmt="[%X]", handlers=[RichHandler()])
logger = logging.getLogger("pydanticai_mcp_github")


load_dotenv(override=True)
API_HOST = os.getenv("API_HOST", "github")


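# Pick a chat model based on API_HOST: Azure OpenAI (keyless auth via
# DefaultAzureCredential), GitHub Models, a local Ollama server, or openai.com.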
if API_HOST == "azure":
    token_provider = azure.identity.get_bearer_token_provider(
        azure.identity.DefaultAzureCredential(),
        "https://cognitiveservices.azure.com/.default",
    )
    client = AsyncAzureOpenAI(
        api_version=os.environ["AZURE_OPENAI_VERSION"],
        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
        azure_ad_token_provider=token_provider,
    )
    model = OpenAIChatModel(
        os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
        provider=OpenAIProvider(openai_client=client),
    )
elif API_HOST == "github":
    client = AsyncOpenAI(api_key=os.environ["GITHUB_TOKEN"], base_url="https://models.inference.ai.azure.com")
    model = OpenAIChatModel(
        os.environ.get("GITHUB_MODEL", "gpt-4o-mini"), provider=OpenAIProvider(openai_client=client)
    )
elif API_HOST == "ollama":
    client = AsyncOpenAI(base_url=os.environ["OLLAMA_ENDPOINT"], api_key="none")
    model = OpenAIChatModel(os.environ["OLLAMA_MODEL"], provider=OpenAIProvider(openai_client=client))
else:
    client = AsyncOpenAI()
    model = OpenAIChatModel(
        os.environ.get("OPENAI_MODEL", "gpt-4o-mini"), provider=OpenAIProvider(openai_client=client)
    )


class IssueProposal(BaseModel):
    """Structured proposal for closing an issue."""

    url: str = Field(description="URL of the issue")
    title: str = Field(description="Title of the issue")
    summary: str = Field(description="Brief summary of the issue and signals for closing")
    should_close: bool = Field(description="Whether the issue should be closed or not")
    reply_message: str = Field(description="Message to post when closing the issue, if applicable")


async def main():
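    # Connect to the hosted GitHub MCP server over streamable HTTP, reusing GITHUB_TOKEN for auth.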
    server = MCPServerStreamableHTTP(
        url="https://api.githubcopilot.com/mcp/", headers={"Authorization": f"Bearer {os.getenv('GITHUB_TOKEN', '')}"}
    )
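    # Allowlist a few read-only tools so the agent can search and inspect issues but never mutate the repo.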
    desired_tool_names = ("list_issues", "search_code", "search_issues", "search_pull_requests")
    filtered_tools = server.filtered(lambda ctx, tool_def: tool_def.name in desired_tool_names)

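    # Give the agent the filtered toolset and a structured output type;
    # PydanticAI validates the final answer into an IssueProposal.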
    agent: Agent[None, IssueProposal] = Agent(
        model,
        system_prompt=(
            "You are an issue triage assistant. Use the provided tools to find an issue that can be closed "
            "and produce an IssueProposal."
        ),
        output_type=IssueProposal,
        toolsets=[filtered_tools],
    )

    user_content = "Find an issue from Azure-samples azure-search-openai-demo that can be closed."
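    # agent.iter exposes each node of the agent graph as it runs, so we can log tool calls and tool results.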
    async with agent.iter(user_content) as agent_run:
        async for node in agent_run:
            if isinstance(node, CallToolsNode):
                # A model response can mix text parts and tool-call parts; log only the tool calls.
                for part in node.model_response.parts:
                    if isinstance(part, ToolCallPart):
                        logger.info(f"Calling tool '{part.tool_name}' with args:\n{part.args}")
            elif isinstance(node, ModelRequestNode) and isinstance(node.request.parts[0], ToolReturnPart):
                # Tool results may contain non-JSON-serializable objects, so fall back to str().
                tool_return_value = json.dumps(node.request.parts[0].content, default=str)
                logger.info(f"Got tool result:\n{tool_return_value[:200]}...")

    print(agent_run.result.output)


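# Surface this script's INFO logs (the root config stays at WARNING) and run the async entrypoint.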
if __name__ == "__main__":
    logger.setLevel(logging.INFO)
    asyncio.run(main())