Skip to content

Commit 4967cad

Browse files
authored
Fix/template updates (#587)
* Update templates * Remove factory and token templates for now
1 parent 85dd46b commit 4967cad

File tree

6 files changed

+174
-44
lines changed

6 files changed

+174
-44
lines changed

src/mcp_agent/cli/commands/init.py

Lines changed: 17 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -153,8 +153,8 @@ def init(
153153
scaffolding_templates = {
154154
"basic": "Simple agent with filesystem and fetch capabilities",
155155
"server": "MCP server with workflow and parallel agents",
156-
"token": "Token counting example with monitoring",
157-
"factory": "Agent factory with router-based selection",
156+
# "token": "Token counting example with monitoring",
157+
# "factory": "Agent factory with router-based selection",
158158
"minimal": "Minimal configuration files only",
159159
}
160160

@@ -360,7 +360,7 @@ def init(
360360
# No separate agents.yaml needed; agent definitions live in mcp_agent.config.yaml
361361

362362
# Create README for the basic template
363-
readme_content = _load_template("README_init.md")
363+
readme_content = _load_template("README_basic.md")
364364
if readme_content:
365365
created = _write_readme(dir, readme_content, force)
366366
if created:
@@ -374,23 +374,30 @@ def init(
374374
files_created.append(created)
375375

376376
elif template == "server":
377-
server_path = dir / "server.py"
377+
server_path = dir / "main.py"
378378
server_content = _load_template("basic_agent_server.py")
379379
if server_content and _write(server_path, server_content, force):
380-
files_created.append("server.py")
380+
files_created.append("main.py")
381381
# Make executable
382382
try:
383383
server_path.chmod(server_path.stat().st_mode | 0o111)
384384
except Exception:
385385
pass
386386

387387
# README for server template
388-
readme_content = _load_template("README_init.md")
388+
readme_content = _load_template("README_server.md")
389389
if readme_content:
390390
created = _write_readme(dir, readme_content, force)
391391
if created:
392392
files_created.append(created)
393393

394+
# Add basic requirements.txt
395+
requirements_content = _load_template("requirements.txt")
396+
if requirements_content:
397+
created = _write_requirements(dir, requirements_content, force)
398+
if created:
399+
files_created.append(created)
400+
394401
elif template == "token":
395402
token_path = dir / "token_example.py"
396403
token_content = _load_template("token_counter.py")
@@ -402,7 +409,7 @@ def init(
402409
except Exception:
403410
pass
404411

405-
readme_content = _load_template("README_init.md")
412+
readme_content = _load_template("README_token.md")
406413
if readme_content:
407414
created = _write_readme(dir, readme_content, force)
408415
if created:
@@ -425,7 +432,7 @@ def init(
425432
if agents_content and _write(agents_path, agents_content, force):
426433
files_created.append("agents.yaml")
427434

428-
readme_content = _load_template("README_init.md")
435+
readme_content = _load_template("README_factory.md")
429436
if readme_content:
430437
created = _write_readme(dir, readme_content, force)
431438
if created:
@@ -448,9 +455,9 @@ def init(
448455
run_file = entry_script_name or "main.py"
449456
console.print(f"3. Run your agent: [cyan]uv run {run_file}[/cyan]")
450457
elif template == "server":
451-
console.print("3. Run the server: [cyan]uv run server.py[/cyan]")
458+
console.print("3. Run the server: [cyan]uv run main.py[/cyan]")
452459
console.print(
453-
" Or serve: [cyan]mcp-agent dev serve --script server.py[/cyan]"
460+
" Or serve: [cyan]mcp-agent dev serve --script main.py[/cyan]"
454461
)
455462
elif template == "token":
456463
console.print("3. Run the example: [cyan]uv run token_example.py[/cyan]")
File renamed without changes.

src/mcp_agent/data/templates/README_factory.md

Whitespace-only changes.
Lines changed: 143 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,143 @@
1+
# MCP-Agent Server Starter
2+
3+
Welcome! This project was generated by `mcp-agent init`. It demonstrates how to expose your mcp-agent application as an MCP server, making your agentic workflows available to any MCP client.
4+
5+
## What's included
6+
7+
- An `MCPApp` named `basic_agent_server` (see `main.py`).
8+
- A workflow class `BasicAgentWorkflow`:
9+
- Uses `Agent` to connect to `filesystem` and `fetch` MCP servers.
10+
- Demonstrates multi-turn conversations with an LLM (OpenAI).
11+
- Shows how to configure model preferences for specific requests.
12+
- A tool function decorated with `@app.tool`:
13+
- `grade_story(story: str, app_ctx?)` - Grades a student's short story using parallel agents (proofreader, fact checker, style enforcer) via `ParallelLLM`.
14+
- Returns the final result directly to the caller (no polling needed).
15+
- Server logs are forwarded to connected MCP clients as notifications.
16+
17+
## What gets exposed as MCP tools
18+
19+
When you run `main.py`, your MCP server exposes:
20+
21+
- `workflows-list` - Lists available workflows and their parameter schemas
22+
- `workflows-BasicAgentWorkflow-run` - Executes the BasicAgentWorkflow with input
23+
- `workflows-get_status` - Get status for a running workflow by `run_id`
24+
- `workflows-cancel` - Cancel a running workflow
25+
- `grade_story` - Synchronous tool that grades a short story and returns the final result
26+
27+
## Quick start
28+
29+
1. Add your OpenAI API key to `mcp_agent.secrets.yaml` (or set `OPENAI_API_KEY` env var).
30+
31+
NOTE: You can use another supported provider (e.g. Anthropic) instead, just be sure to set its API key in the `mcp_agent.secrets.yaml` (or set its env var) and import/use the relevant `AugmentedLLM` in `main.py`.
32+
33+
2. Review `mcp_agent.config.yaml`:
34+
35+
- Execution engine: `asyncio`
36+
- Logger settings
37+
- MCP servers: `filesystem`, `fetch`
38+
39+
3. Install dependencies and run the server:
40+
41+
```bash
42+
uv pip install -r requirements.txt
43+
uv run main.py
44+
```
45+
46+
The server will start and expose its tools over SSE. You'll see:
47+
48+
```bash
49+
Creating MCP server for basic_agent_server
50+
Registered workflows:
51+
- BasicAgentWorkflow
52+
MCP Server settings: ...
53+
```
54+
55+
4. Connect with an MCP client:
56+
57+
You can connect to this server using any MCP client. For example, use [MCP Inspector](https://github.com/modelcontextprotocol/inspector) to explore and test:
58+
59+
```bash
60+
npx @modelcontextprotocol/inspector --transport sse --server-url http://127.0.0.1:8000/sse
61+
```
62+
63+
This will launch the inspector UI where you can:
64+
65+
- See all available tools (`grade_story`, `workflows-BasicAgentWorkflow-run`, etc.)
66+
- Test workflow execution
67+
- View request/response details
68+
69+
5. Deploy as a remote MCP server:
70+
71+
When you're ready to deploy, ensure the required API keys are set in `mcp_agent.secrets.yaml` and then run:
72+
73+
```bash
74+
uv run mcp-agent login
75+
```
76+
77+
to authenticate to mcp-agent cloud. You will be redirected to the login page, where you can create an mcp-agent cloud account through Google or GitHub.
78+
79+
Set up your mcp-agent cloud API key and copy and paste it into your terminal:
80+
81+
```bash
82+
INFO: Directing to MCP Agent Cloud API login...
83+
Please enter your API key 🔑:
84+
```
85+
86+
In your terminal, deploy the MCP app:
87+
88+
```bash
89+
uv run mcp-agent deploy basic_agent_server
90+
```
91+
92+
You will then be prompted to specify the type of secret to save your OpenAI API key as. Select (1) deployment secret so that it is available to the deployed server.
93+
94+
The `deploy` command will bundle the app files and deploy them, wrapping your app as a hosted MCP SSE server with a URL of the form:
95+
`https://<server_id>.deployments.mcp-agent.com`.
96+
97+
Anything decorated with `@app.tool` (or `@app.async_tool`) runs as a Temporal workflow in the cloud.
98+
99+
Since the mcp-agent app is exposed as an MCP server, it can be used in any MCP client just
100+
like any other MCP server. For example, you can inspect and test the server using MCP Inspector:
101+
102+
```bash
103+
npx @modelcontextprotocol/inspector --transport sse --server-url https://<server_id>.deployments.mcp-agent.com/sse
104+
```
105+
106+
## Notes
107+
108+
- `app_ctx` is the MCPApp Context (configuration, logger, upstream session, etc.).
109+
- Logging uses `app.logger` and is forwarded as notifications when connected to an MCP client.
110+
- Configuration is read from `mcp_agent.config.yaml` and `mcp_agent.secrets.yaml` (env vars supported).
111+
- The default model is configurable (see `openai.default_model` in config).
112+
- The server runs in `asyncio` mode and exposes tools via SSE by default.
113+
114+
## Key concepts demonstrated
115+
116+
- **Creating workflows**: Use the `@app.workflow` decorator and `Workflow` base class to define reusable workflows.
117+
- **Defining tools**: Use `@app.tool` for synchronous tools that return results immediately.
118+
- **Using agents**: Create `Agent` instances with specific instructions and server access (filesystem, fetch, etc.).
119+
- **Parallel execution**: Use `ParallelLLM` to run multiple agents in parallel and aggregate their results.
120+
- **Multi-turn conversations**: LLMs maintain conversation context across multiple `generate_str()` calls.
121+
- **Model preferences**: Configure model selection via `RequestParams` and `ModelPreferences`.
122+
- **Server creation**: Use `create_mcp_server_for_app()` to wrap your MCPApp as an MCP server.
123+
124+
## Next steps
125+
126+
- Modify the `BasicAgentWorkflow` instructions or server list to fit your use case.
127+
- Add more tools with `@app.tool` or `@app.async_tool` as you grow the app.
128+
- Explore the `grade_story` tool to understand parallel agent execution.
129+
- Customize the agents used by `ParallelLLM` (proofreader, fact checker, style enforcer).
130+
- Read the docs and explore examples:
131+
- GitHub: https://github.com/lastmile-ai/mcp-agent
132+
- Docs: https://docs.mcp-agent.com/
133+
- Discord: https://lmai.link/discord/mcp-agent
134+
135+
## Further reading
136+
137+
- Configuration reference and secrets management.
138+
- MCP servers (stdio, SSE, streamable_http, websockets) and timeouts.
139+
- Temporal workflows, activities, and logging/notifications when deployed.
140+
- Agents and LLMs: `AgentSpec`, prompts, and model defaults.
141+
- Using `@app.async_tool` for long-running workflows (returns workflow_id/run_id for polling).
142+
143+
Happy building!

src/mcp_agent/data/templates/README_token.md

Whitespace-only changes.

src/mcp_agent/data/templates/basic_agent_server.py

Lines changed: 14 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@
77
3. Declarative agent configuration using FastMCPApp decorators
88
"""
99

10-
import argparse
1110
import asyncio
1211
import os
1312
from typing import Optional
@@ -20,14 +19,15 @@
2019
from mcp_agent.agents.agent import Agent
2120
from mcp_agent.workflows.llm.augmented_llm import RequestParams
2221
from mcp_agent.workflows.llm.llm_selector import ModelPreferences
23-
from mcp_agent.workflows.llm.augmented_llm_anthropic import AnthropicAugmentedLLM
22+
23+
# We are using the OpenAI augmented LLM for this example but you can swap with others (e.g. AnthropicAugmentedLLM)
2424
from mcp_agent.workflows.llm.augmented_llm_openai import OpenAIAugmentedLLM
2525
from mcp_agent.workflows.parallel.parallel_llm import ParallelLLM
2626
from mcp_agent.executor.workflow import Workflow, WorkflowResult
2727

2828
# Note: This is purely optional:
2929
# if not provided, a default FastMCP server will be created by MCPApp using create_mcp_server_for_app()
30-
mcp = FastMCP(name="basic_agent_server", description="My basic agent server example.")
30+
mcp = FastMCP(name="basic_agent_server")
3131

3232
# Define the MCPApp instance. The server created for this app will advertise the
3333
# MCP logging capability and forward structured logs upstream to connected clients.
@@ -57,8 +57,8 @@ async def run(self, input: str) -> WorkflowResult[str]:
5757
WorkflowResult containing the processed data.
5858
"""
5959

60-
logger = app.logger
6160
context = app.context
61+
logger = context.logger
6262

6363
logger.info("Current config:", data=context.config.model_dump())
6464
logger.info(
@@ -82,7 +82,7 @@ async def run(self, input: str) -> WorkflowResult[str]:
8282
result = await finder_agent.list_tools()
8383
logger.info("Tools available:", data=result.model_dump())
8484

85-
llm = await finder_agent.attach_llm(AnthropicAugmentedLLM)
85+
llm = await finder_agent.attach_llm(OpenAIAugmentedLLM)
8686

8787
result = await llm.generate_str(
8888
message=input,
@@ -123,12 +123,8 @@ async def grade_story(story: str, app_ctx: Optional[AppContext] = None) -> str:
123123
story: The student's short story to grade
124124
app_ctx: Optional MCPApp context for accessing app resources and logging
125125
"""
126-
# Use the context's app if available for proper logging with upstream_session
127-
_app = app_ctx.app if app_ctx else app
128-
# Ensure the app's logger is bound to the current context with upstream_session
129-
if _app._logger and hasattr(_app._logger, "_bound_context"):
130-
_app._logger._bound_context = app_ctx
131-
logger = _app.logger
126+
context = app_ctx or app.context
127+
logger = context.logger
132128
logger.info(f"grade_story: Received input: {story}")
133129

134130
proofreader = Agent(
@@ -184,40 +180,24 @@ async def grade_story(story: str, app_ctx: Optional[AppContext] = None) -> str:
184180

185181

186182
async def main():
187-
parser = argparse.ArgumentParser()
188-
parser.add_argument(
189-
"--custom-fastmcp-settings",
190-
action="store_true",
191-
help="Enable custom FastMCP settings for the server",
192-
)
193-
args = parser.parse_args()
194-
use_custom_fastmcp_settings = args.custom_fastmcp_settings
195-
196183
async with app.run() as agent_app:
197184
# Add the current directory to the filesystem server's args if needed
198185
context = agent_app.context
199186
if "filesystem" in context.config.mcp.servers:
200187
context.config.mcp.servers["filesystem"].args.extend([os.getcwd()])
201188

202189
# Log registered workflows and agent configurations
203-
agent_app.logger.info(f"Creating MCP server for {agent_app.name}")
190+
context.logger.info(f"Creating MCP server for {agent_app.name}")
204191

205-
agent_app.logger.info("Registered workflows:")
192+
context.logger.info("Registered workflows:")
206193
for workflow_id in agent_app.workflows:
207-
agent_app.logger.info(f" - {workflow_id}")
208-
209-
# Create the MCP server that exposes both workflows and agent configurations,
210-
# optionally using custom FastMCP settings
211-
fast_mcp_settings = (
212-
{"host": "localhost", "port": 8001, "debug": True, "log_level": "DEBUG"}
213-
if use_custom_fastmcp_settings
214-
else None
215-
)
216-
mcp_server = create_mcp_server_for_app(agent_app, **(fast_mcp_settings or {}))
217-
agent_app.logger.info(f"MCP Server settings: {mcp_server.settings}")
194+
context.logger.info(f" - {workflow_id}")
195+
196+
mcp_server = create_mcp_server_for_app(agent_app)
197+
context.logger.info(f"MCP Server settings: {mcp_server.settings}")
218198

219199
# Run the server
220-
await mcp_server.run_stdio_async()
200+
await mcp_server.run_sse_async()
221201

222202

223203
if __name__ == "__main__":

0 commit comments

Comments
 (0)