diff --git a/README.md b/README.md
index b0ef509..939036a 100644
--- a/README.md
+++ b/README.md
@@ -11,8 +11,9 @@ Both single and multiple MCP server examples are demonstrated
- [Agent with multiple MCP servers](agents_mcp_usage/multi_mcp/README.md)
- Also includes Agent evaluations
-The repo also includes a Python MCP Server [`run_server.py`](run_server.py) based on [MCP Python SDK Quickstart](https://github.com/modelcontextprotocol/python-sdk/blob/b4c7db6a50a5c88bae1db5c1f7fba44d16eebc6e/README.md?plain=1#L104)
-- Modified to include a datetime tool and run as a server invoked by Agents
+The repo also includes Python MCP Servers:
+- [`example_server.py`](mcp_servers/example_server.py) based on [MCP Python SDK Quickstart](https://github.com/modelcontextprotocol/python-sdk/blob/b4c7db6a50a5c88bae1db5c1f7fba44d16eebc6e/README.md?plain=1#L104) - Modified to include a datetime tool and run as a server invoked by Agents
+- [`mermaid_validator.py`](mcp_servers/mermaid_validator.py) - Mermaid diagram validation server using mermaid-cli
Tracing is done through Pydantic Logfire.
@@ -67,8 +68,9 @@ This project aims to teach:
- **eval_multi_mcp/** - Contains evaluation examples for multi-MCP usage:
- `evals_pydantic_mcp.py` - Example of evaluating the use of multiple MCP servers with Pydantic-AI
-- **Demo Python MCP Server**
- - `run_server.py` - Simple MCP server that runs locally, implemented in Python
+- **Demo Python MCP Servers**
+ - `mcp_servers/example_server.py` - Simple MCP server that runs locally, implemented in Python
+ - `mcp_servers/mermaid_validator.py` - Mermaid diagram validation MCP server, implemented in Python
## Basic MCP: Single Server Usage
@@ -104,7 +106,7 @@ graph LR
end
subgraph "Python MCP Server"
- MCP["Model Context Protocol Server
(run_server.py)"]
+ MCP["Model Context Protocol Server
(mcp_servers/example_server.py)"]
Tools["Tools
- add(a, b)
- get_current_time()"]
Resources["Resources
- greeting://{name}"]
MCP --- Tools
@@ -177,8 +179,8 @@ graph LR
end
subgraph "MCP Servers"
- PythonMCP["Python MCP Server
(run_server.py)"]
- NodeMCP["Node.js MCP Server
(mermaid-validator)"]
+ PythonMCP["Python MCP Server
(mcp_servers/example_server.py)"]
+ MermaidMCP["Python Mermaid MCP Server
(mcp_servers/mermaid_validator.py)"]
Tools["Tools
- add(a, b)
- get_current_time()"]
Resources["Resources
- greeting://{name}"]
@@ -186,7 +188,7 @@ graph LR
PythonMCP --- Tools
PythonMCP --- Resources
- NodeMCP --- MermaidValidator
+ MermaidMCP --- MermaidValidator
end
subgraph "LLM Providers"
@@ -196,10 +198,10 @@ graph LR
Logfire[("Logfire
Tracing")]
Agent --> PythonMCP
- Agent --> NodeMCP
+ Agent --> MermaidMCP
PythonMCP --> LLMs
- NodeMCP --> LLMs
+ MermaidMCP --> LLMs
Agent --> Logfire
diff --git a/agents_mcp_usage/basic_mcp/README.md b/agents_mcp_usage/basic_mcp/README.md
index b5d2541..d6aae46 100644
--- a/agents_mcp_usage/basic_mcp/README.md
+++ b/agents_mcp_usage/basic_mcp/README.md
@@ -45,7 +45,7 @@ graph LR
end
subgraph "Python MCP Server"
- MCP["Model Context Protocol Server
(run_server.py)"]
+ MCP["Model Context Protocol Server
(mcp_servers/example_server.py)"]
Tools["Tools
- add(a, b)
- get_current_time()"]
Resources["Resources
- greeting://{name}"]
MCP --- Tools
@@ -229,7 +229,7 @@ The examples are designed to be as similar as possible, allowing you to compare
## MCP Server
-All examples connect to the same MCP server defined in `run_server.py` at the project root. This server provides:
+All examples connect to the same MCP server defined in `mcp_servers/example_server.py`. This server provides:
- An addition tool (`add(a, b)`)
- A time tool (`get_current_time()`)
diff --git a/agents_mcp_usage/basic_mcp/basic_mcp_use/adk_mcp.py b/agents_mcp_usage/basic_mcp/basic_mcp_use/adk_mcp.py
index 3df467f..d167efe 100644
--- a/agents_mcp_usage/basic_mcp/basic_mcp_use/adk_mcp.py
+++ b/agents_mcp_usage/basic_mcp/basic_mcp_use/adk_mcp.py
@@ -29,7 +29,7 @@ async def main(query: str = "Greet Andrew and give him the current time") -> Non
# Set up MCP server connection
server_params = StdioServerParameters(
command="uv",
- args=["run", "run_server.py", "stdio"],
+ args=["run", "mcp_servers/example_server.py", "stdio"],
)
tools, exit_stack = await MCPToolset.from_server(connection_params=server_params)
diff --git a/agents_mcp_usage/basic_mcp/basic_mcp_use/langgraph_mcp.py b/agents_mcp_usage/basic_mcp/basic_mcp_use/langgraph_mcp.py
index 4ceb295..72bad31 100644
--- a/agents_mcp_usage/basic_mcp/basic_mcp_use/langgraph_mcp.py
+++ b/agents_mcp_usage/basic_mcp/basic_mcp_use/langgraph_mcp.py
@@ -21,7 +21,7 @@
# Create server parameters for stdio connection
server = StdioServerParameters(
command="uv",
- args=["run", "run_server.py", "stdio"],
+ args=["run", "mcp_servers/example_server.py", "stdio"],
)
model = ChatGoogleGenerativeAI(
diff --git a/agents_mcp_usage/basic_mcp/basic_mcp_use/oai-agent_mcp.py b/agents_mcp_usage/basic_mcp/basic_mcp_use/oai-agent_mcp.py
index 969eff9..4dcc8d4 100644
--- a/agents_mcp_usage/basic_mcp/basic_mcp_use/oai-agent_mcp.py
+++ b/agents_mcp_usage/basic_mcp/basic_mcp_use/oai-agent_mcp.py
@@ -24,7 +24,7 @@ async def main(query: str = "Greet Andrew and give him the current time") -> Non
async with MCPServerStdio(
params={
"command": "uv",
- "args": ["run", "run_server.py", "stdio"],
+ "args": ["run", "mcp_servers/example_server.py", "stdio"],
}
) as server:
# Initialise the agent with the server
diff --git a/agents_mcp_usage/basic_mcp/basic_mcp_use/pydantic_mcp.py b/agents_mcp_usage/basic_mcp/basic_mcp_use/pydantic_mcp.py
index eb4bc22..6584e54 100644
--- a/agents_mcp_usage/basic_mcp/basic_mcp_use/pydantic_mcp.py
+++ b/agents_mcp_usage/basic_mcp/basic_mcp_use/pydantic_mcp.py
@@ -16,7 +16,7 @@
command="uv",
args=[
"run",
- "run_server.py",
+ "mcp_servers/example_server.py",
"stdio",
],
)
diff --git a/agents_mcp_usage/multi_mcp/README.md b/agents_mcp_usage/multi_mcp/README.md
index 254e3f7..f910806 100644
--- a/agents_mcp_usage/multi_mcp/README.md
+++ b/agents_mcp_usage/multi_mcp/README.md
@@ -9,8 +9,8 @@ Agents utilising multiple MCP servers can be dramatically more complex than an A
1. Configure `.env` and API keys following instructions in the [README.md](README.md)
-3. Ensure the Node.js MCP server can be used:
- - Install the Node.js MCP server (mermaid-validator) locally, run `make install` if you haven't already
+3. Ensure the Python MCP servers can be used:
+   - The Python MCP servers (`example_server.py` and `mermaid_validator.py`) are included in the repository, so no separate installation step is required
4. Run an example script:
```bash
@@ -38,8 +38,8 @@ graph LR
end
subgraph "MCP Servers"
- PythonMCP["Python MCP Server
(run_server.py)"]
- NodeMCP["Node.js MCP Server
(mermaid-validator)"]
+ PythonMCP["Python MCP Server
(mcp_servers/example_server.py)"]
+ MermaidMCP["Python Mermaid MCP Server
(mcp_servers/mermaid_validator.py)"]
Tools["Tools
- add(a, b)
- get_current_time()"]
Resources["Resources
- greeting://{name}"]
@@ -47,7 +47,7 @@ graph LR
PythonMCP --- Tools
PythonMCP --- Resources
- NodeMCP --- MermaidValidator
+ MermaidMCP --- MermaidValidator
end
subgraph "LLM Providers"
@@ -57,10 +57,10 @@ graph LR
Logfire[("Logfire
Tracing")]
Agent --> PythonMCP
- Agent --> NodeMCP
+ Agent --> MermaidMCP
PythonMCP --> LLMs
- NodeMCP --> LLMs
+ MermaidMCP --> LLMs
Agent --> Logfire
@@ -82,7 +82,7 @@ graph LR
class User userNode;
class Agent agentNode;
class PythonMCP pythonMcpNode;
- class NodeMCP nodeMcpNode;
+ class MermaidMCP nodeMcpNode;
class Tools,Resources,MermaidValidator toolNode;
class LLMs llmNode;
class LLM_Response outputNode;
@@ -98,10 +98,10 @@ sequenceDiagram
participant User
participant Agent as Pydantic-AI/ADK Agent
participant PyMCP as Python MCP Server
- participant NodeMCP as Node.js MCP Server
+ participant MermaidMCP as Python Mermaid MCP Server
participant LLM as LLM Provider
participant PyTools as Python Tools
- participant NodeTools as Mermaid Validator
+ participant MermaidTools as Mermaid Validator
participant Logfire as Logfire Tracing
Note over User,Logfire: Multi-MCP Interaction Flow
@@ -116,9 +116,9 @@ sequenceDiagram
activate PyMCP
PyMCP-->>Agent: Connection established
- Agent->>NodeMCP: Initialise connection
- activate NodeMCP
- NodeMCP-->>Agent: Connection established
+ Agent->>MermaidMCP: Initialise connection
+ activate MermaidMCP
+ MermaidMCP-->>Agent: Connection established
end
Agent->>LLM: Process user query
@@ -134,14 +134,14 @@ sequenceDiagram
deactivate PyTools
PyMCP-->>Agent: Tool result
Agent->>LLM: Continue with tool result
- else Node.js MCP Tools Needed
+ else Mermaid MCP Tools Needed
LLM-->>Agent: Need Mermaid validation
- Agent->>NodeMCP: Validate Mermaid diagram
- NodeMCP->>NodeTools: Process diagram
- activate NodeTools
- NodeTools-->>NodeMCP: Validation result
- deactivate NodeTools
- NodeMCP-->>Agent: Tool result
+ Agent->>MermaidMCP: Validate Mermaid diagram
+ MermaidMCP->>MermaidTools: Process diagram
+ activate MermaidTools
+ MermaidTools-->>MermaidMCP: Validation result
+ deactivate MermaidTools
+ MermaidMCP-->>Agent: Tool result
Agent->>LLM: Continue with tool result
end
end
@@ -154,8 +154,8 @@ sequenceDiagram
par Close MCP connections
Agent->>PyMCP: Close connection
deactivate PyMCP
- Agent->>NodeMCP: Close connection
- deactivate NodeMCP
+ Agent->>MermaidMCP: Close connection
+ deactivate MermaidMCP
end
Agent->>User: Display final answer
@@ -210,7 +210,7 @@ uv run agents_mcp_usage/multi_mcp/multi_mcp_use/adk_mcp.py
Key features:
- Uses Google's ADK framework with Gemini model
-- Connects to both Python MCP server and Node.js Mermaid validator
+- Connects to both the Python MCP server and the Python Mermaid validator
- Demonstrates proper connection management with contextlib.AsyncExitStack
- Shows how to handle asynchronous MCP tool integration
- Uses a simple test case that utilizes both MCP servers in a single query
diff --git a/agents_mcp_usage/multi_mcp/eval_multi_mcp/evals_pydantic_mcp.py b/agents_mcp_usage/multi_mcp/eval_multi_mcp/evals_pydantic_mcp.py
index 9b43814..079708e 100644
--- a/agents_mcp_usage/multi_mcp/eval_multi_mcp/evals_pydantic_mcp.py
+++ b/agents_mcp_usage/multi_mcp/eval_multi_mcp/evals_pydantic_mcp.py
@@ -36,7 +36,7 @@
command="uv",
args=[
"run",
- "run_server.py",
+ "mcp_servers/example_server.py",
"stdio",
],
)
diff --git a/agents_mcp_usage/multi_mcp/mermaid_diagrams.py b/agents_mcp_usage/multi_mcp/mermaid_diagrams.py
index baffa49..485c993 100644
--- a/agents_mcp_usage/multi_mcp/mermaid_diagrams.py
+++ b/agents_mcp_usage/multi_mcp/mermaid_diagrams.py
@@ -20,7 +20,7 @@
# MCP Server
subgraph "MCP"
direction TD
- MCP["Model Context Protocol Server
(run_server.py)"]
+ MCP["Model Context Protocol Server
(mcp_servers/example_server.py)"]
Tools["Tools
- add(a, b)
- get_current_time() e.g. {current_time}"]
Resources["Resources
- greeting://{{name}}"]
MCP --- Tools
@@ -86,7 +86,7 @@
# MCP Server
subgraph "MCP"
direction TB
- MCP["Model Context Protocol Server
(run_server.py)"]
+ MCP["Model Context Protocol Server
(mcp_servers/example_server.py)"]
Tools["Tools
- add(a, b)
- get_current_time() e.g. {current_time}"]
Resources["Resources
- greeting://{{name}}"]
MCP --- Tools
@@ -150,7 +150,7 @@
%% MCP Server
subgraph "MCP Server"
direction TB
- MCP["Model Context Protocol Server
(run_server.py)"]
+ MCP["Model Context Protocol Server
(mcp_servers/example_server.py)"]
Tools["Tools
- add(a, b)
- get_current_time() e.g. {current_time}"]
Resources["Resources
- greeting://{{name}}"]
MCPs --- Tools
@@ -214,7 +214,7 @@
%% MCP Server
subgraph "MCP Server"
direction TB
- MCP["Model Context Protocol Server
(run_server.py)"]
+ MCP["Model Context Protocol Server
(mcp_servers/example_server.py)"]
Tools["Tools
- add(a, b)
- get_current_time() e.g. {current_time}"]
Resources["Resources
- greeting://{{name}}"]
MCP --- Tools
diff --git a/agents_mcp_usage/multi_mcp/multi_mcp_use/adk_mcp.py b/agents_mcp_usage/multi_mcp/multi_mcp_use/adk_mcp.py
index 1a93a69..5e56efe 100644
--- a/agents_mcp_usage/multi_mcp/multi_mcp_use/adk_mcp.py
+++ b/agents_mcp_usage/multi_mcp/multi_mcp_use/adk_mcp.py
@@ -1,6 +1,7 @@
import asyncio
import contextlib
import os
+import time
import logfire
from dotenv import load_dotenv
@@ -35,16 +36,16 @@ async def get_tools_async():
command="uv",
args=[
"run",
- "run_server.py",
+ "mcp_servers/example_server.py",
"stdio",
],
)
mermaid_server = StdioServerParameters(
- command="npx",
+ command="uv",
args=[
- "-y",
- "@rtuin/mcp-mermaid-validator@latest",
+ "run",
+ "mcp_servers/mermaid_validator.py",
],
)
@@ -123,6 +124,13 @@ async def main(query: str = "Hi!", request_limit: int = 5) -> None:
await exit_stack.aclose()
print("Cleanup complete.")
+ # Give Logfire time to complete any pending exports
+ print("Shutting down Logfire...")
+ logfire.shutdown()
+ # Small delay to ensure export completes
+ time.sleep(0.5)
+ print("Logfire shutdown complete.")
+
if __name__ == "__main__":
query = f"Add the current time and fix the mermaid diagram syntax using the validator: {invalid_mermaid_diagram_easy}. Return only the fixed mermaid diagram between backticks."
diff --git a/agents_mcp_usage/multi_mcp/multi_mcp_use/pydantic_mcp.py b/agents_mcp_usage/multi_mcp/multi_mcp_use/pydantic_mcp.py
index 325ac99..7e60749 100644
--- a/agents_mcp_usage/multi_mcp/multi_mcp_use/pydantic_mcp.py
+++ b/agents_mcp_usage/multi_mcp/multi_mcp_use/pydantic_mcp.py
@@ -20,15 +20,15 @@
command="uv",
args=[
"run",
- "run_server.py",
+ "mcp_servers/example_server.py",
"stdio",
],
)
mermaid_server = MCPServerStdio(
- command="npx",
+ command="uv",
args=[
- "-y",
- "@rtuin/mcp-mermaid-validator@latest",
+ "run",
+ "mcp_servers/mermaid_validator.py",
],
)
# Create Agent with MCP servers
@@ -54,7 +54,7 @@ async def main(query: str = "Hi!", request_limit: int = 5) -> None:
# Invoke the agent with the usage limits
async with agent.run_mcp_servers():
result = await agent.run(query, usage_limits=usage_limits)
- # print(result.output)
+ print(result.output)
return result
diff --git a/run_server.py b/mcp_servers/example_server.py
similarity index 100%
rename from run_server.py
rename to mcp_servers/example_server.py