Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
57 changes: 57 additions & 0 deletions examples/servers/mcp-server-template/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
# syntax=docker/dockerfile:1
## This dockerfile is a template for all MCP servers
## It can be used as is or edited to fit your needs


# --- Build stage --- #
FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS builder

WORKDIR /app

# Copy packages out of the cache mount instead of hardlinking, so the venv
# stays valid after the build-time mount is gone
ENV UV_LINK_MODE=copy

# Enable bytecode compilation: longer builds but faster startup
ENV UV_COMPILE_BYTECODE=1

# Install third-party dependencies only, from bind-mounted lockfiles
# (ensure uv.lock exists locally). Keeping this separate from the project
# install caches the slow dependency layer until uv.lock/pyproject.toml change.
RUN --mount=type=cache,target=/root/.cache/uv \
    --mount=type=bind,source=uv.lock,target=uv.lock \
    --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
    uv sync --frozen --no-install-project --no-dev --no-editable

# Install the project itself (done for layer caching optimization).
# COPY, not ADD: ADD is reserved for tar auto-extraction / checksummed URLs (DL3020).
COPY . /app
RUN --mount=type=cache,target=/root/.cache/uv \
    uv sync --frozen --no-dev --no-editable

# --- Dev stage --- #
FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS dev

WORKDIR /app

ENV UV_LINK_MODE=copy
ENV UV_COMPILE_BYTECODE=1

COPY --from=builder /app/.venv /app/.venv

# Additionally install dev and debug dependencies
RUN --mount=type=cache,target=/root/.cache/uv \
    --mount=type=bind,source=uv.lock,target=uv.lock \
    --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
    uv sync --frozen --no-install-project --group dev --group debug

# Make container use python installed inside the venv
ENV PATH="/app/.venv/bin:$PATH"

# Keep the dev container alive so a shell or debugger can be attached
CMD ["tail", "-f", "/dev/null"]

# --- Production stage --- #
FROM python:3.12-slim-bookworm AS prod

WORKDIR /app

# Run as an unprivileged user in production; port 8000 does not need root
RUN groupadd --system app && useradd --system --gid app --home /app app

COPY --from=builder --chown=app:app /app/.venv /app/.venv

# Make container use python installed inside the venv
ENV PATH="/app/.venv/bin:$PATH"

USER app

# Documentation only: publish at run time with -p 8000:8000
EXPOSE 8000

# Module name must match the package built by pyproject.toml
# (was "mcp_server_template", which is not the package this project installs)
CMD ["python", "-m", "mcp_server_calculator", "--host", "0.0.0.0", "--port", "8000"]
21 changes: 21 additions & 0 deletions examples/servers/mcp-server-template/LICENSE
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2024 Xyber inc.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
172 changes: 172 additions & 0 deletions examples/servers/mcp-server-template/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,172 @@
# MCP Template Server

> **General:** This repository serves as a template for creating new MCP (Model Context Protocol) servers.
> It provides a basic structure and examples for implementing MCP-compatible microservices.

## Overview

This template demonstrates how to create a microservice that exposes functionality through the Model Context Protocol (MCP). It includes a basic calculator service as an example implementation.

## MCP Tools:


1. `calculate`
- **Description:** Performs basic arithmetic calculations
- **Input:**
- Operand (Literal["add", "subtract", "multiply", "divide"])
- Variable 1 (float)
- Variable 2 (float)
- **Output:** A string containing the calculated result


## Requirements

- Python 3.12+
- UV (for dependency management)
- Docker (optional, for containerization)

## Setup

1. **Clone the Repository**:
```bash
git clone <repository-url>
cd mcp-server-template
```

2. **Create `.env` File based on `.env.example`**:
```dotenv
# Example environment variables
MCP_CALCULATOR_HOST="0.0.0.0"
MCP_CALCULATOR_PORT=8000
LOGGING_LEVEL="info"
```

3. **Install Dependencies**:
```bash
   uv sync
```

## Running the Server

### Locally

```bash
# Basic run
python -m mcp_server_calculator

# Custom port and host
python -m mcp_server_calculator --host 0.0.0.0 --port 8000
```

### Using Docker

```bash
# Build the image
docker build -t mcp-server-calculator .

# Run the container
docker run --rm -it -p 8000:8000 --env-file .env mcp-server-calculator
```

## Example Client
## Example Client
Once the server has started, any MCP client
can connect to it.

This example shows how to use the calculator service with a LangGraph ReAct agent:

```python
import os
import asyncio
from dotenv import load_dotenv
from langchain_mcp_adapters.client import MultiServerMCPClient
from langgraph.prebuilt import create_react_agent
from langchain_openai import ChatOpenAI

async def main():
# Load environment variables from .env, should contain OPENAI_API_KEY
load_dotenv()

# Initialize LLM
model = ChatOpenAI(model="gpt-4")

# Connect to MCP server
async with MultiServerMCPClient({
"calculate": {
"url": "http://localhost:8000/sse",
"transport": "sse",
}
}) as client:
# Get available tools
tools = client.get_tools()

# !! IMPORTANT : Get tools and modify them to have return_direct=True!!!
        # Otherwise the LangGraph agent could fall into an infinite loop,
        # ignoring tool results
tools: list[StructuredTool] = client.get_tools()
for tool in tools:
tool.return_direct = True

# Use case 1: Create agent with tools
agent = create_react_agent(model, tools)

# Example query using the calculator
response = await agent.ainvoke({
"messages": [{
"role": "user",
"content": "What is 15% of 850, rounded to 2 decimal places?"
}]
})

print(response["messages"][-1].content)

# Use case 2: Run tool directly:


# !IMPORTANT
# Always set tool_call_id to some value: otherwise
        # the tool call would not return any artifacts beyond text
# https://github.com/langchain-ai/langchain/issues/29874
result: ToolMessage = await tool.arun(parameters,
response_format='content_and_artifact',
tool_call_id=uuid.uuid4())
print("Tool result:", result)

if __name__ == "__main__":
asyncio.run(main())
```

## Project Structure

```
mcp-server-template/
├── src/
│ └── mcp_server_calculator/
└── calculator/ # Contains all the business logic
├── __init__.py # Exposes all needed functionality to server.py
├── config.py # Contains module env settings, custom Error classes
├── module.py # Business module core logic
│ ├── __init__.py
│ ├── __main__.py # Contains uvicorn server setup logic
│ ├── logging_config.py # Contains shared logging configuration
│ ├── server.py # Contains tool schemas/definitions, sets MCP server up
├── .env.example
├── .gitignore
├── Dockerfile
├── LICENSE
├── pyproject.toml
├── README.md
└── uv.lock
```

## Contributing

1. Fork the repository
2. Create your feature branch
3. Commit your changes
4. Push to the branch
5. Create a Pull Request

## License

MIT
42 changes: 42 additions & 0 deletions examples/servers/mcp-server-template/pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
## This file should be edited
## (at minimum: name, description, authors, and the wheel packages path at the bottom)

[project]
name = "mcp-server-calculator"
version = "0.1.0"
description = "MCP Server providing tools to ..."
authors = [
    { name = "XyAgent", email = "[email protected]" },
]
readme = "README.md"
requires-python = ">=3.12"

## Feel free to add more dependencies here!
## Can be done manually or via "uv add ..."

dependencies = [
    "mcp>=1.6.0,<2.0.0",
    "starlette>=0.37.0",
    "pydantic-settings>=2.0",
    "uvicorn[standard]",
]

## Dependency groups are installed via "uv sync --group <name>";
## they are not part of the distributed package.
[dependency-groups]
dev = [
    "pytest",
    "httpx",
    "ruff",
    "black",
    "isort",
    # feel free to add/edit dependencies
]
debug = [
    "debugpy>=1.8.0",
]

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"


[tool.hatch.build.targets.wheel]
packages = ["src/mcp_server_calculator"] ## Don't forget to put the actual path to your project here!
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
# This template file mostly will stay the same for all MCP servers
# It is responsible for launching a uvicorn server with the given MCP server

import argparse
import logging
import os

import uvicorn
from mcp.server import Server
from mcp.server.sse import SseServerTransport
from mcp_server_calculator.logging_config import (configure_logging,
logging_level)
from mcp_server_calculator.server import server
from starlette.applications import Starlette
from starlette.requests import Request
from starlette.routing import Mount, Route

configure_logging()
logger = logging.getLogger(__name__)

# --- Application Factory --- #


def create_starlette_app() -> Starlette:
    """Create a Starlette application that serves the provided MCP server over SSE.

    Returns:
        A Starlette app exposing:
          * GET  ``/sse``        -- long-lived SSE connection for the MCP session
          * POST ``/messages/``  -- endpoint where the client posts its messages
    """

    # The transport's path must match the Mount below
    sse = SseServerTransport("/messages/")
    mcp_server: Server = server

    async def handle_sse(request: Request) -> None:
        # Bridge the HTTP request into an MCP read/write stream pair and run
        # the MCP server over it until the client disconnects.
        async with sse.connect_sse(
            request.scope,
            request.receive,
            request._send,  # noqa: SLF001  (transport needs the raw ASGI send callable)
        ) as (read_stream, write_stream):
            await mcp_server.run(
                read_stream,
                write_stream,
                mcp_server.create_initialization_options(),
            )

    return Starlette(
        # Starlette debug tracebacks only when logging is configured at DEBUG
        debug=logging_level == "DEBUG",
        routes=[
            Route("/sse", endpoint=handle_sse),
            Mount("/messages/", app=sse.handle_post_message),
        ],
    )


if __name__ == "__main__":
    # Each CLI flag falls back to an environment variable, then a hard default.
    default_host = os.getenv("MCP_CALCULATOR_HOST", "0.0.0.0")  # Override with your env variables
    default_port = int(os.getenv("MCP_CALCULATOR_PORT", "8000"))  # Override with your env variables
    default_reload = os.getenv("MCP_CALCULATOR_HOT_RELOAD", "false").lower() in (
        "true",
        "1",
        "t",
        "yes",
    )  # Override with your env variables

    parser = argparse.ArgumentParser(description="Run Calculator MCP server")
    parser.add_argument(
        "--host",
        default=default_host,
        help="Host to bind to (Default: MCP_CALCULATOR_HOST or 0.0.0.0)",
    )
    parser.add_argument(
        "--port",
        type=int,
        default=default_port,
        help="Port to listen on (Default: MCP_CALCULATOR_PORT or 8000)",
    )
    parser.add_argument(
        "--reload",
        action="store_true",
        default=default_reload,
        help="Enable hot reload (env: MCP_CALCULATOR_HOT_RELOAD)",
    )
    args = parser.parse_args()

    logger.info(f"Starting Calculator MCP server on {args.host}:{args.port}")

    # Don't forget to change the module name to your own!
    # factory=True: uvicorn imports the factory from the string and calls it,
    # which is what allows --reload to re-import the app on code changes.
    uvicorn.run(
        "mcp_server_calculator.__main__:create_starlette_app",
        host=args.host,
        port=args.port,
        reload=args.reload,
        log_level=logging_level.lower(),
        factory=True,
    )
Loading