3 changes: 3 additions & 0 deletions python-mcp-client/README.md
@@ -0,0 +1,3 @@
# Build a Python MCP Client to Test Servers From Your Terminal

This folder provides the code examples for the Real Python article [Build a Python MCP Client to Test Servers From Your Terminal](https://realpython.com/python-mcp-client/).
81 changes: 81 additions & 0 deletions python-mcp-client/source-code-final/README.md
@@ -0,0 +1,81 @@
# MCP Client

A minimal command-line client for testing Model Context Protocol (MCP) servers, with AI-powered chat built on the OpenAI API.

## Features

- **MCP server integration**: Connect to any MCP server
- **Server introspection**: List available tools, prompts, and resources with the `--members` option
- **AI-powered chat**: Interactive terminal chat with AI-driven tool execution using the `--chat` option

## Installation

1. Download the sample code

2. Install dependencies:
```console
$ uv sync
```

3. Set up your OpenAI API key:
```console
$ export OPENAI_API_KEY="your-openai-api-key"
```
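
To check that the client is wired up correctly, you can print its help text, which argparse generates from the options defined in `cli.py`:

```console
$ uv run python -m mcp_client --help
```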

## Usage

### List Server Members

Inspect what tools, prompts, and resources are available on an MCP server:

```console
$ uv run python -m mcp_client /path/to/mcp_server.py --members
```

This command will display the following:

- Tools and their descriptions
- Prompts and their purposes
- Resources and their types
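
For the bundled example server (see the Example MCP Server section below), the output looks roughly like this. This is a sketch based on the client's formatting code, so exact names and wording may vary slightly:

```console
$ uv run python -m mcp_client mcp_server/mcp_server.py --members

MCP Server Members
==================================================

TOOLS (1):
------------------------------
  > echo - Echo back the message.

PROMPTS (1):
------------------------------
  > greeting_prompt - A simple greeting prompt.

RESOURCES (1):
------------------------------
  > greeting_file - The greeting text file.

==================================================
```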

### Interactive Chat Mode

Start an interactive chat session using the MCP server tools:

```console
$ uv run python -m mcp_client /path/to/mcp_server.py --chat
```

- Example: ask a question and get an AI-powered response:
```console
$ uv run python -m mcp_client mcp_server/mcp_server.py --chat

MCP Client's Chat Started!
Type your queries or 'quit' to exit.

You: Greet Pythonista

Assistant: [Used echo({'message': "Hello, Pythonista! 🐍 How's your coding journey going today?"})]
Hello, Pythonista! 🐍 How's your coding journey going today?

You:
```

- The AI will automatically use available MCP tools when needed
- Type `quit` to exit

## Example MCP Server

The project includes `mcp_server.py`, a minimal MCP server that provides:

- An `echo` tool that echoes back a message
- A sample greeting prompt and a greeting text file resource

You can use this server to test the client's functionality.
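
If you want to see the AI choose between multiple tools, you can extend the example server with another tool. The snippet below is illustrative and not part of the project; it only relies on the same `@mcp.tool()` decorator already used in `mcp_server.py`:

```python
# Illustrative addition to mcp_server/mcp_server.py (not part of the project)
@mcp.tool()
async def add(a: float, b: float) -> float:
    """Add two numbers and return their sum."""
    return a + b
```

With this tool in place, asking the chat something like "What is 2 plus 3?" should prompt the assistant to call `add` instead of `echo`.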

## Requirements

- Python >= 3.13
- The MCP Python SDK and OpenAI Python SDK
- An OpenAI API key
- An MCP server to connect to
28 changes: 28 additions & 0 deletions python-mcp-client/source-code-final/mcp_client/__main__.py
@@ -0,0 +1,28 @@
import asyncio

from mcp_client.cli import parse_args
from mcp_client.mcp_client import MCPClient


def main():
"""Entry point for the mcp-client CLI app."""
args = parse_args()
if not args.server_path.exists():
print(f"Error: Server script '{args.server_path}' not found")
return

async def run():
try:
async with MCPClient(args.server_path) as client:
if args.members:
await client.list_all_members()
elif args.chat:
await client.run_chat()
except RuntimeError as e:
print(e)

asyncio.run(run())


if __name__ == "__main__":
main()
17 changes: 17 additions & 0 deletions python-mcp-client/source-code-final/mcp_client/chat.py
@@ -0,0 +1,17 @@
async def run_chat(handler) -> None:
"""Run an AI-handled chat session."""
print("\nMCP Client's Chat Started!")
print("Type your queries or 'quit' to exit.")

while True:
try:
if not (query := input("\nYou: ").strip()):
continue
if query.lower() == "quit":
break

print("\n" + await handler.process_query(query))
except Exception as e:
print(f"\nError: {str(e)}")

print("\nGoodbye!")
24 changes: 24 additions & 0 deletions python-mcp-client/source-code-final/mcp_client/cli.py
@@ -0,0 +1,24 @@
import argparse
import pathlib


def parse_args():
"""Parse command line arguments and return parsed args."""
parser = argparse.ArgumentParser(description="A minimal MCP client")
parser.add_argument(
"server_path",
type=pathlib.Path,
help="path to the MCP server script",
)
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument(
"--members",
action="store_true",
help="list the MCP server's tools, prompts, and resources",
)
group.add_argument(
"--chat",
action="store_true",
help="start an AI-powered chat with MCP server integration",
)
return parser.parse_args()
106 changes: 106 additions & 0 deletions python-mcp-client/source-code-final/mcp_client/handlers.py
@@ -0,0 +1,106 @@
import json
import os

from mcp import ClientSession
from openai import OpenAI

MODEL = "gpt-4o-mini"
MAX_TOKENS = 1000


class OpenAIQueryHandler:
"""Handle OpenAI API interaction and MCP tool execution."""

def __init__(self, client_session: ClientSession):
self.client_session = client_session
if not (api_key := os.getenv("OPENAI_API_KEY")):
raise RuntimeError(
"Error: OPENAI_API_KEY environment variable not set",
)
self.openai = OpenAI(api_key=api_key)

async def process_query(self, query: str) -> str:
"""Process a query using OpenAI and available MCP tools."""
        # Get the model's initial response and its decision on tool calls
messages = [{"role": "user", "content": query}]
initial_response = self.openai.chat.completions.create(
model=MODEL,
max_tokens=MAX_TOKENS,
messages=messages,
tools=await self._get_tools(),
)

current_message = initial_response.choices[0].message
result_parts = []

if current_message.content:
result_parts.append(current_message.content)

# Handle tool usage if present
if tool_calls := current_message.tool_calls:
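            # The assistant message that requested the tool calls must be
            # appended to the history before the tool results, as the API expects.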
messages.append(
{
"role": "assistant",
"content": current_message.content or "",
"tool_calls": tool_calls,
}
)

# Execute tools
for tool_call in tool_calls:
tool_result = await self._execute_tool(tool_call)
result_parts.append(tool_result["log"])
messages.append(tool_result["message"])

            # Get the model's final response after tool execution
final_response = self.openai.chat.completions.create(
model=MODEL,
max_tokens=MAX_TOKENS,
messages=messages,
)

if content := final_response.choices[0].message.content:
result_parts.append(content)

return "Assistant: " + "\n".join(result_parts)

async def _get_tools(self) -> list:
"""Get MCP tools formatted for OpenAI."""
response = await self.client_session.list_tools()
return [
{
"type": "function",
"function": {
"name": tool.name,
"description": tool.description or "No description",
"parameters": getattr(
tool,
"inputSchema",
{"type": "object", "properties": {}},
),
},
}
for tool in response.tools
]

async def _execute_tool(self, tool_call) -> dict:
"""Execute an MCP tool call and return formatted result."""
tool_name = tool_call.function.name
tool_args = json.loads(tool_call.function.arguments or "{}")

try:
result = await self.client_session.call_tool(tool_name, tool_args)
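            # MCP tool results can contain several content blocks; this minimal
            # client only reports the text of the first one.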
content = result.content[0].text if result.content else ""
log = f"[Used {tool_name}({tool_args})]"
except Exception as e:
content = f"Error: {e}"
log = f"[{content}]"

return {
"log": log,
"message": {
"role": "tool",
"tool_call_id": tool_call.id,
"content": content,
},
}
94 changes: 94 additions & 0 deletions python-mcp-client/source-code-final/mcp_client/mcp_client.py
@@ -0,0 +1,94 @@
import sys
from contextlib import AsyncExitStack
from typing import Self

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

from mcp_client import chat
from mcp_client.handlers import OpenAIQueryHandler


class MCPClient:
"""MCP client to interact with MCP server.

Usage:
async with MCPClient(server_path) as client:
# Call client methods here...
"""

client_session: ClientSession

def __init__(self, server_path: str):
self.server_path = server_path
self.exit_stack = AsyncExitStack()

async def list_all_members(self) -> None:
"""List all available tools, prompts, and resources."""
print("MCP Server Members")
print("=" * 50)

sections = {
"tools": self.client_session.list_tools,
"prompts": self.client_session.list_prompts,
"resources": self.client_session.list_resources,
}
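        # The dict keys double as attribute names on each listing result,
        # e.g. list_tools() returns an object with a .tools attribute.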
for section, listing_method in sections.items():
await self._list_section(section, listing_method)

print("\n" + "=" * 50)

async def run_chat(self) -> None:
"""Start interactive chat with MCP server using OpenAI."""
try:
handler = OpenAIQueryHandler(self.client_session)
await chat.run_chat(handler)
except RuntimeError as e:
print(e)

async def __aenter__(self) -> Self:
self.client_session = await self._connect_to_server()
return self

async def __aexit__(self, *_) -> None:
await self.exit_stack.aclose()

async def _list_section(
self,
section: str,
list_func,
) -> None:
try:
items = getattr(await list_func(), section)
if items:
print(f"\n{section.upper()} ({len(items)}):")
print("-" * 30)
for item in items:
description = item.description or "No description"
print(f" > {item.name} - {description}")
else:
print(f"\n{section.upper()}: None available")
except Exception as e:
print(f"\n{section.upper()}: Error - {e}")

async def _connect_to_server(self) -> ClientSession:
try:
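            # Start the server through "sh -c" and silence its stderr so that
            # server-side log output doesn't clutter the client's terminal.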
stdio, write = await self.exit_stack.enter_async_context(
stdio_client(
server=StdioServerParameters(
command="sh",
args=[
"-c",
f"{sys.executable} {self.server_path} 2>/dev/null",
],
env=None,
)
)
)
client_session = await self.exit_stack.enter_async_context(
ClientSession(stdio, write)
)
await client_session.initialize()
return client_session
except Exception:
raise RuntimeError("Error: Failed to connect to server")
2 changes: 2 additions & 0 deletions python-mcp-client/source-code-final/mcp_server/greeting.txt
@@ -0,0 +1,2 @@
Hello, World!
Welcome to MCP Client programming!
26 changes: 26 additions & 0 deletions python-mcp-client/source-code-final/mcp_server/mcp_server.py
@@ -0,0 +1,26 @@
from mcp.server.fastmcp import FastMCP

mcp = FastMCP("mcp_server")


@mcp.tool()
async def echo(message: str) -> str:
"""Echo back the message."""
return message


@mcp.prompt()
async def greeting_prompt(name: str) -> str:
"""A simple greeting prompt."""
return f"Great {name} kindly."


@mcp.resource("file://./greeting.txt")
def greeting_file() -> str:
"""The greeting text file."""
with open("greeting.txt", "r", encoding="utf-8") as file:
return file.read()


if __name__ == "__main__":
mcp.run(transport="stdio")