Skip to content

Commit d31fa51

Browse files
committed
Sample code for the article on building an MCP client
1 parent 16ad784 commit d31fa51

File tree

11 files changed

+909
-0
lines changed

11 files changed

+909
-0
lines changed

python-mcp-client/README.md

Lines changed: 81 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,81 @@
1+
# MCP Client
2+
3+
A minimal client for testing Model Context Protocol (MCP) servers, featuring AI integration.
4+
5+
## Features
6+
7+
- **MCP server integration**: Connect to any MCP server
8+
- **Server introspection**: List available tools, prompts, and resources with the `--members` option
9+
- **TUI AI-powered chat**: Interactive chat with AI-powered tool execution using the `--chat` option
10+
11+
## Installation
12+
13+
1. Download the sample code
14+
15+
2. Install dependencies:
16+
```console
17+
$ uv sync
18+
```
19+
20+
3. Set up your OpenAI API key:
21+
```console
22+
$ export OPENAI_API_KEY="your-openai-api-key"
23+
```
24+
25+
## Usage
26+
27+
### List Server Members
28+
29+
Inspect what tools, prompts, and resources are available on an MCP server:
30+
31+
```console
32+
$ uv run python -m mcp_client /path/to/mcp_server.py --members
33+
```
34+
35+
This command will display the following:
36+
37+
- Tools and their descriptions
38+
- Prompts and their purposes
39+
- Resources and their types
40+
41+
### Interactive Chat Mode
42+
43+
Start an interactive chat session using the MCP server tools:
44+
45+
```console
46+
$ uv run python -m mcp_client <path/to/mcp/server.py> --chat
47+
```
48+
49+
- Example: ask questions and get AI-powered responses:
50+
```console
51+
$ uv run python -m mcp_client mcp_server/mcp_server.py --chat
52+
53+
MCP Client Started!
54+
Type your queries or 'quit' to exit.
55+
56+
You: Greet Pythonista
57+
58+
Assistant: [Used echo({'message': "Hello, Pythonista! 🐍 How's your coding journey going today?"})]
59+
Hello, Pythonista! 🐍 How's your coding journey going today?
60+
61+
You:
62+
```
63+
64+
- The AI will automatically use available MCP tools when needed
65+
- Type `quit` to exit
66+
67+
## Example MCP Server
68+
69+
The project includes `mcp_server.py`, a minimal MCP server that provides:
70+
71+
- A sample tool that says hello
72+
- Sample prompts and resources
73+
74+
You can use this server to test the client's functionalities.
75+
76+
## Requirements
77+
78+
- Python >= 3.13
79+
- The MCP Python SDK and OpenAI Python SDK
80+
- An OpenAI API key
81+
- An MCP server to connect to

python-mcp-client/mcp_client/__init__.py

Whitespace-only changes.
Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
import asyncio
2+
3+
from mcp_client.cli import parse_args
4+
from mcp_client.mcp_client import MCPClient
5+
6+
7+
def main():
    """Entry point for the mcp-client CLI app.

    Validates the server script path, then runs the requested mode
    (member listing or chat) inside a single asyncio event loop.
    """
    args = parse_args()
    if not args.server_path.exists():
        print(f"Error: Server script '{args.server_path}' not found")
        return

    async def dispatch():
        # Connection/setup failures surface as RuntimeError from the client.
        try:
            async with MCPClient(args.server_path) as client:
                # The CLI enforces exactly one of --members / --chat.
                if args.members:
                    await client.list_all_members()
                if args.chat:
                    await client.run_chat()
        except RuntimeError as error:
            print(error)

    asyncio.run(dispatch())
25+
26+
27+
if __name__ == "__main__":
    # Allow direct script execution in addition to "python -m mcp_client".
    main()
Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
async def run_chat(handler) -> None:
    """Run an AI-handled chat session.

    Reads queries from stdin in a loop and prints the handler's response
    for each one. An empty line is ignored; typing "quit" (any case)
    ends the session. Per-query errors are printed and the loop continues.
    """
    print("\nMCP Client's Chat Started!")
    print("Type your queries or 'quit' to exit.")

    while True:
        try:
            query = input("\nYou: ").strip()
            if not query:
                continue
            if query.lower() == "quit":
                break

            response = await handler.process_query(query)
            print("\n" + response)
        except Exception as e:
            # Keep the chat alive: report the failure and prompt again.
            print(f"\nError: {str(e)}")

    print("\nGoodbye!")
Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
import argparse
2+
import pathlib
3+
4+
5+
def parse_args():
    """Parse command line arguments and return parsed args.

    Requires a positional server script path plus exactly one of the
    mutually exclusive mode flags: --members or --chat.
    """
    arg_parser = argparse.ArgumentParser(description="A minimal MCP client")
    arg_parser.add_argument(
        "server_path",
        type=pathlib.Path,
        help="path to the MCP server script",
    )
    mode = arg_parser.add_mutually_exclusive_group(required=True)
    # Both modes are boolean flags; register them uniformly.
    for flag, description in (
        ("--members", "list the MCP server's tools, prompts, and resources"),
        ("--chat", "start an AI-powered chat with MCP server integration"),
    ):
        mode.add_argument(flag, action="store_true", help=description)
    return arg_parser.parse_args()
Lines changed: 106 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,106 @@
1+
import json
2+
import os
3+
4+
from mcp import ClientSession
5+
from openai import OpenAI
6+
7+
# OpenAI chat model used for every completion request in this module.
MODEL = "gpt-4o-mini"
# Upper bound on tokens generated per completion request.
MAX_TOKENS = 1000
9+
10+
11+
class OpenAIQueryHandler:
    """Handle OpenAI API interaction and MCP tool execution.

    Bridges an OpenAI chat model and an MCP ``ClientSession``: the model
    is offered the server's tools, and when it requests tool calls they
    are executed through the session and fed back for a final answer.
    """

    def __init__(self, client_session: ClientSession):
        """Keep the MCP session and build the OpenAI client.

        Raises:
            RuntimeError: If the OPENAI_API_KEY environment variable is
                missing or empty.
        """
        self.client_session = client_session
        if not (api_key := os.getenv("OPENAI_API_KEY")):
            raise RuntimeError(
                "Error: OPENAI_API_KEY environment variable not set",
            )
        self.openai = OpenAI(api_key=api_key)

    async def process_query(self, query: str) -> str:
        """Process a query using OpenAI and available MCP tools.

        Returns a single string prefixed with "Assistant: " joining the
        model's initial text, one log line per executed tool, and (when
        tools ran) the model's final follow-up response.
        """
        # Get initial Model's response and decision on tool calls
        messages = [{"role": "user", "content": query}]
        initial_response = self.openai.chat.completions.create(
            model=MODEL,
            max_tokens=MAX_TOKENS,
            messages=messages,
            tools=await self._get_tools(),
        )

        current_message = initial_response.choices[0].message
        result_parts = []

        if current_message.content:
            result_parts.append(current_message.content)

        # Handle tool usage if present
        if tool_calls := current_message.tool_calls:
            # Echo the assistant turn (with its tool_calls) back into the
            # history so the follow-up request is well-formed.
            messages.append(
                {
                    "role": "assistant",
                    "content": current_message.content or "",
                    "tool_calls": tool_calls,
                }
            )

            # Execute tools
            for tool_call in tool_calls:
                tool_result = await self._execute_tool(tool_call)
                result_parts.append(tool_result["log"])
                messages.append(tool_result["message"])

            # Get final Model's response after tool execution
            final_response = self.openai.chat.completions.create(
                model=MODEL,
                max_tokens=MAX_TOKENS,
                messages=messages,
            )

            if content := final_response.choices[0].message.content:
                result_parts.append(content)

        return "Assistant: " + "\n".join(result_parts)

    async def _get_tools(self) -> list:
        """Get MCP tools formatted for OpenAI.

        Returns:
            A list of OpenAI "function" tool specs, one per MCP tool.
        """
        response = await self.client_session.list_tools()
        return [
            {
                "type": "function",
                "function": {
                    "name": tool.name,
                    "description": tool.description or "No description",
                    # Fall back to an empty object schema when the tool
                    # declares no input schema.
                    "parameters": getattr(
                        tool,
                        "inputSchema",
                        {"type": "object", "properties": {}},
                    ),
                },
            }
            for tool in response.tools
        ]

    async def _execute_tool(self, tool_call) -> dict:
        """Execute an MCP tool call and return formatted result.

        Returns:
            A dict with a "log" string for display and a "message" dict
            shaped as an OpenAI "tool" role message for the chat history.
        """
        tool_name = tool_call.function.name
        # The model may send no arguments; treat that as an empty object.
        tool_args = json.loads(tool_call.function.arguments or "{}")

        try:
            result = await self.client_session.call_tool(tool_name, tool_args)
            # NOTE(review): only the first content item is read and it is
            # assumed to be text — confirm multi-part or non-text tool
            # results are out of scope.
            content = result.content[0].text if result.content else ""
            log = f"[Used {tool_name}({tool_args})]"
        except Exception as e:
            # Report the failure to the model instead of aborting the query.
            content = f"Error: {e}"
            log = f"[{content}]"

        return {
            "log": log,
            "message": {
                "role": "tool",
                "tool_call_id": tool_call.id,
                "content": content,
            },
        }
Lines changed: 94 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,94 @@
1+
import sys
2+
from contextlib import AsyncExitStack
3+
from typing import Self
4+
5+
from mcp import ClientSession, StdioServerParameters
6+
from mcp.client.stdio import stdio_client
7+
8+
from mcp_client import chat
9+
from mcp_client.handlers import OpenAIQueryHandler
10+
11+
12+
class MCPClient:
    """MCP client to interact with MCP server.

    Usage:
        async with MCPClient(server_path) as client:
            # Call client methods here...
    """

    # Set in __aenter__; all other methods assume the context is active.
    client_session: ClientSession

    def __init__(self, server_path: str):
        self.server_path = server_path
        self.exit_stack = AsyncExitStack()

    async def list_all_members(self) -> None:
        """List all available tools, prompts, and resources."""
        print("MCP Server Members")
        print("=" * 50)

        # Each section name doubles as the attribute to read off the
        # corresponding list_* result object.
        sections = {
            "tools": self.client_session.list_tools,
            "prompts": self.client_session.list_prompts,
            "resources": self.client_session.list_resources,
        }
        for section, listing_method in sections.items():
            await self._list_section(section, listing_method)

        print("\n" + "=" * 50)

    async def run_chat(self) -> None:
        """Start interactive chat with MCP server using OpenAI."""
        try:
            handler = OpenAIQueryHandler(self.client_session)
            await chat.run_chat(handler)
        except RuntimeError as e:
            # Typically a missing OPENAI_API_KEY raised by the handler.
            print(e)

    async def __aenter__(self) -> Self:
        self.client_session = await self._connect_to_server()
        return self

    async def __aexit__(self, *_) -> None:
        # Close the session and stdio transport in reverse entry order.
        await self.exit_stack.aclose()

    async def _list_section(
        self,
        section: str,
        list_func,
    ) -> None:
        """Print one section (tools/prompts/resources) of server members."""
        try:
            items = getattr(await list_func(), section)
            if items:
                print(f"\n{section.upper()} ({len(items)}):")
                print("-" * 30)
                for item in items:
                    description = item.description or "No description"
                    print(f" > {item.name} - {description}")
            else:
                print(f"\n{section.upper()}: None available")
        except Exception as e:
            # A server may not implement every capability; report and
            # continue with the remaining sections.
            print(f"\n{section.upper()}: Error - {e}")

    async def _connect_to_server(self) -> ClientSession:
        """Spawn the server subprocess and return an initialized session.

        Raises:
            RuntimeError: If the transport or the session handshake fails;
                the original exception is chained as the cause.
        """
        try:
            stdio, write = await self.exit_stack.enter_async_context(
                stdio_client(
                    server=StdioServerParameters(
                        command="sh",
                        args=[
                            "-c",
                            # NOTE(review): routing through "sh -c ... 2>/dev/null"
                            # silences the server's stderr but is POSIX-only —
                            # confirm Windows support is out of scope.
                            f"{sys.executable} {self.server_path} 2>/dev/null",
                        ],
                        env=None,
                    )
                )
            )
            client_session = await self.exit_stack.enter_async_context(
                ClientSession(stdio, write)
            )
            await client_session.initialize()
            return client_session
        except Exception as e:
            # Chain the cause so the underlying failure stays debuggable
            # instead of being silently discarded.
            raise RuntimeError("Error: Failed to connect to server") from e
Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
Hello, World!
2+
Welcome to MCP Client programming!
Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
from mcp.server.fastmcp import FastMCP
2+
3+
# Shared FastMCP server instance; the decorators below register members on it.
mcp = FastMCP("mcp_server")
4+
5+
6+
@mcp.tool()
async def echo(message: str) -> str:
    """Echo back the message.

    Minimal tool so clients can verify end-to-end tool execution.
    """
    return message
10+
11+
12+
@mcp.prompt()
async def greeting_prompt(name: str) -> str:
    """A simple greeting prompt.

    Args:
        name: The name of the person the model should greet.

    Returns:
        Prompt text instructing the model to greet *name*.
    """
    # Fixed typo: "Great" -> "Greet" (this is a greeting instruction).
    return f"Greet {name} kindly."
16+
17+
18+
@mcp.resource("file://./greeting.txt")
def greeting_file() -> str:
    """The greeting text file."""
    # NOTE(review): the path is resolved against the current working
    # directory, not this module's directory — confirm the server is
    # always launched from the folder containing greeting.txt.
    with open("greeting.txt", "r", encoding="utf-8") as file:
        return file.read()
23+
24+
25+
if __name__ == "__main__":
    # Serve over stdio so a client can spawn this script as a subprocess.
    mcp.run(transport="stdio")

0 commit comments

Comments
 (0)