From 9e9c6c0112247ce59ee7573410787dade48a6b9d Mon Sep 17 00:00:00 2001 From: rholinshead <5060851+rholinshead@users.noreply.github.com> Date: Wed, 22 Oct 2025 23:06:11 -0400 Subject: [PATCH 1/4] Update basic example --- src/mcp_agent/data/templates/README_init.md | 31 +++++++++++++++------ src/mcp_agent/data/templates/basic_agent.py | 12 +++++--- src/mcp_agent/data/templates/secrets.yaml | 31 +++++++++++---------- 3 files changed, 47 insertions(+), 27 deletions(-) diff --git a/src/mcp_agent/data/templates/README_init.md b/src/mcp_agent/data/templates/README_init.md index 7a2bb3599..9af14aff6 100644 --- a/src/mcp_agent/data/templates/README_init.md +++ b/src/mcp_agent/data/templates/README_init.md @@ -15,10 +15,9 @@ Welcome! This project was generated by `mcp-agent init`. It’s a minimal, reada ## Quick start -1. Add API keys to `mcp_agent.secrets.yaml` (or set env vars): +1. Add your OpenAI API key to `mcp_agent.secrets.yaml` (or set `OPENAI_API_KEY` env var). - - `OPENAI_API_KEY` (recommended) - - `ANTHROPIC_API_KEY` (optional) +NOTE: You can use another supported provider (e.g. Anthropic) instead, just be sure to set its API key in the `mcp_agent.secrets.yaml` and import/use the relevant `AugmentedLLM` in `main.py`. 2. Review `mcp_agent.config.yaml`: @@ -38,23 +37,39 @@ You’ll see two summaries printed: - A summary of `README.md` from your current directory. - A summary of the intro page at modelcontextprotocol.io. -4. Deploy a remote MCP server: - -### Run as an MCP server +4. Run locally as an MCP server: - In `main.py`, UNCOMMENT the server lines that call `create_mcp_server_for_app(agent_app)` and `run_sse_async()`. - Start the server: `uv run main.py` +- Once you see the server started, e.g. + ```bash + Uvicorn running on http://127.0.0.1:8000 + ``` + you can connect to it with your preferred MCP Client. 
For example, you can use [MCP Inspector](https://github.com/modelcontextprotocol/inspector) to explore and test the server: + +```bash +npx @modelcontextprotocol/inspector --transport sse --server-url http://127.0.0.1:8000/sse +``` + +5. Deploy as a remote MCP server: When you're ready to deploy, simply run: ```bash -mcp-agent deploy "hello_world" +uv run mcp-agent deploy "hello_world" ``` - This wraps your app as a hosted MCP SSE server. - Anything decorated with `@app.tool` (or `@app.async_tool`) runs as a Temporal workflow in the cloud. -Notes +Since the mcp-agent app is exposed as an MCP server, it can be used in any MCP client just +like any other MCP server. For example, you can inspect and test the server using MCP Inspector: + +```bash +npx @modelcontextprotocol/inspector --transport sse --server-url https://.deployments.mcp-agent.com/sse +``` + +## Notes - `app_ctx` is the MCPApp Context (configuration, logger, upstream session, etc.). - Logging uses `app.logger` and is forwarded as notifications when connected to an MCP client. diff --git a/src/mcp_agent/data/templates/basic_agent.py b/src/mcp_agent/data/templates/basic_agent.py index 8cfc33f3a..c30cb0694 100644 --- a/src/mcp_agent/data/templates/basic_agent.py +++ b/src/mcp_agent/data/templates/basic_agent.py @@ -7,7 +7,7 @@ - @app.tool and @app.async_tool decorators to expose your agents as long-running tools on an MCP server. - Advanced MCP features: Notifications, sampling, and elicitation -You can run this example locally using "uv run main.py", and also deploy it as an MCP server using "mcp-agent deploy". +You can run this example locally using "uv run main.py", and also deploy it as an MCP server using "uv run mcp-agent deploy". Let's get started! 
""" @@ -21,7 +21,10 @@ from mcp_agent.agents.agent import Agent from mcp_agent.agents.agent_spec import AgentSpec from mcp_agent.core.context import Context as AppContext +from mcp_agent.server.app_server import create_mcp_server_for_app from mcp_agent.workflows.factory import create_agent + +# We are using the OpenAI augmented LLM for this example but you can swap with others (e.g. AnthropicAugmentedLLM) from mcp_agent.workflows.llm.augmented_llm_openai import OpenAIAugmentedLLM # Create the MCPApp, the root of mcp-agent. @@ -135,20 +138,21 @@ async def main(): print(webpage_summary) # UNCOMMENT to run this MCPApp as an MCP server + # NOTE: You can comment-out the above agent runs if you only want to run the server ######################################################### # Create the MCP server that exposes both workflows and agent configurations, # optionally using custom FastMCP settings - # mcp_server = create_mcp_server_for_app(agent_app) + mcp_server = create_mcp_server_for_app(agent_app) # # Run the server - # await mcp_server.run_sse_async() + await mcp_server.run_sse_async() if __name__ == "__main__": asyncio.run(main()) # When you're ready to deploy this MCPApp as a remote SSE server, run: -# > mcp-agent deploy "hello_world" +# > uv run mcp-agent deploy "hello_world" # # Congrats! You made it to the end of the getting-started example! # There is a lot more that mcp-agent can do, and we hope you'll explore the rest of the documentation. diff --git a/src/mcp_agent/data/templates/secrets.yaml b/src/mcp_agent/data/templates/secrets.yaml index 7e80da4fc..d476d47cd 100644 --- a/src/mcp_agent/data/templates/secrets.yaml +++ b/src/mcp_agent/data/templates/secrets.yaml @@ -2,26 +2,27 @@ # WARNING: Keep this file secure and never commit to version control # Provider API Keys +# We default to OpenAI, but you can configure your preferred providers here. 
# You can also set these as environment variables instead openai: - api_key: "" # Or use OPENAI_API_KEY env var + api_key: "" # Or use OPENAI_API_KEY env var -anthropic: - api_key: "" # Or use ANTHROPIC_API_KEY env var +# anthropic: +# api_key: "" # Or use ANTHROPIC_API_KEY env var -google: - api_key: "" # Or use GOOGLE_API_KEY env var +# google: +# api_key: "" # Or use GOOGLE_API_KEY env var -azure: - api_key: "" # Or use AZURE_API_KEY env var - base_url: "" # https://your-resource.openai.azure.com/ - api_version: "2024-02-01" - # use_default_azure_credential: false # Set to true for DefaultAzureCredential +# azure: +# api_key: "" # Or use AZURE_API_KEY env var +# base_url: "" # https://your-resource.openai.azure.com/ +# api_version: "2024-02-01" +# # use_default_azure_credential: false # Set to true for DefaultAzureCredential -bedrock: - aws_access_key_id: "" # Or use AWS_ACCESS_KEY_ID env var - aws_secret_access_key: "" # Or use AWS_SECRET_ACCESS_KEY env var - aws_region: "us-east-1" +# bedrock: +# aws_access_key_id: "" # Or use AWS_ACCESS_KEY_ID env var +# aws_secret_access_key: "" # Or use AWS_SECRET_ACCESS_KEY env var +# aws_region: "us-east-1" # MCP Server environment variables # mcp: @@ -31,4 +32,4 @@ bedrock: # GITHUB_PERSONAL_ACCESS_TOKEN: ghp_... # brave-search: # env: -# BRAVE_API_KEY: BSA_... \ No newline at end of file +# BRAVE_API_KEY: BSA_... 
From c3cd51e2e6305bbc526c594a4f87ced8660d4f24 Mon Sep 17 00:00:00 2001 From: rholinshead <5060851+rholinshead@users.noreply.github.com> Date: Thu, 23 Oct 2025 00:03:03 -0400 Subject: [PATCH 2/4] Add basic requirements.txt generation --- src/mcp_agent/cli/commands/init.py | 27 +++++++++++++++++++ src/mcp_agent/data/templates/basic_agent.py | 10 ++++--- src/mcp_agent/data/templates/requirements.txt | 5 ++++ src/mcp_agent/data/templates/secrets.yaml | 12 ++++----- 4 files changed, 44 insertions(+), 10 deletions(-) create mode 100644 src/mcp_agent/data/templates/requirements.txt diff --git a/src/mcp_agent/cli/commands/init.py b/src/mcp_agent/cli/commands/init.py index 475110ee6..aa07048fc 100644 --- a/src/mcp_agent/cli/commands/init.py +++ b/src/mcp_agent/cli/commands/init.py @@ -74,6 +74,26 @@ def _write_readme(dir_path: Path, content: str, force: bool) -> str | None: return None +def write_requirements(dir_path: Path, content: str, force: bool) -> str | None: + """Create a requirements.txt file with fallback logging if one already exists. + + Returns the filename created, or None if it could not be written (in which case + the content is printed to console as a fallback). 
+ """ + path = dir_path / "requirements.txt" + if not path.exists() or force: + ok = _write(path, content, force) + if ok: + return "requirements.txt" + # Fallback: print content to console if we couldn't write the file + console.print( + "\n[yellow]A requirements.txt already exists and could not be overwritten.[/yellow]" + ) + console.print("[bold]Suggested requirements.txt contents:[/bold]\n") + console.print(content) + return None + + @app.callback(invoke_without_command=True) def init( ctx: typer.Context, @@ -191,6 +211,13 @@ def init( if created: files_created.append(created) + # Add basic requirements.txt + requirements_content = _load_template("requirements.txt") + if requirements_content: + created = write_requirements(dir, requirements_content, force) + if created: + files_created.append(created) + elif template == "server": server_path = dir / "server.py" server_content = _load_template("basic_agent_server.py") diff --git a/src/mcp_agent/data/templates/basic_agent.py b/src/mcp_agent/data/templates/basic_agent.py index c30cb0694..3277e643f 100644 --- a/src/mcp_agent/data/templates/basic_agent.py +++ b/src/mcp_agent/data/templates/basic_agent.py @@ -21,7 +21,9 @@ from mcp_agent.agents.agent import Agent from mcp_agent.agents.agent_spec import AgentSpec from mcp_agent.core.context import Context as AppContext -from mcp_agent.server.app_server import create_mcp_server_for_app + +# UNCOMMENT to run this MCPApp as a server +# from mcp_agent.server.app_server import create_mcp_server_for_app from mcp_agent.workflows.factory import create_agent # We are using the OpenAI augmented LLM for this example but you can swap with others (e.g. 
AnthropicAugmentedLLM) @@ -137,15 +139,15 @@ async def main(): print("Webpage summary:") print(webpage_summary) - # UNCOMMENT to run this MCPApp as an MCP server + # UNCOMMENT to run this MCPApp as an MCP server (also uncomment the import of create_mcp_server_for_app at the top) # NOTE: You can comment-out the above agent runs if you only want to run the server ######################################################### # Create the MCP server that exposes both workflows and agent configurations, # optionally using custom FastMCP settings - mcp_server = create_mcp_server_for_app(agent_app) + # mcp_server = create_mcp_server_for_app(agent_app) # # Run the server - await mcp_server.run_sse_async() + # await mcp_server.run_sse_async() if __name__ == "__main__": diff --git a/src/mcp_agent/data/templates/requirements.txt b/src/mcp_agent/data/templates/requirements.txt new file mode 100644 index 000000000..b33e65149 --- /dev/null +++ b/src/mcp_agent/data/templates/requirements.txt @@ -0,0 +1,5 @@ +mcp-agent + +# Optionally, include additional dependencies required for this project +openai +# anthropic diff --git a/src/mcp_agent/data/templates/secrets.yaml b/src/mcp_agent/data/templates/secrets.yaml index d476d47cd..d3d6a0cb0 100644 --- a/src/mcp_agent/data/templates/secrets.yaml +++ b/src/mcp_agent/data/templates/secrets.yaml @@ -5,23 +5,23 @@ # We default to OpenAI, but you can configure your preferred providers here. 
# You can also set these as environment variables instead openai: - api_key: "" # Or use OPENAI_API_KEY env var + api_key: "" # Or remove and use OPENAI_API_KEY env var # anthropic: -# api_key: "" # Or use ANTHROPIC_API_KEY env var +# api_key: "" # Or remove and use ANTHROPIC_API_KEY env var # google: -# api_key: "" # Or use GOOGLE_API_KEY env var +# api_key: "" # Or remove and use GOOGLE_API_KEY env var # azure: -# api_key: "" # Or use AZURE_API_KEY env var +# api_key: "" # Or remove and use AZURE_API_KEY env var # base_url: "" # https://your-resource.openai.azure.com/ # api_version: "2024-02-01" # # use_default_azure_credential: false # Set to true for DefaultAzureCredential # bedrock: -# aws_access_key_id: "" # Or use AWS_ACCESS_KEY_ID env var -# aws_secret_access_key: "" # Or use AWS_SECRET_ACCESS_KEY env var +# aws_access_key_id: "" # Or remove and use AWS_ACCESS_KEY_ID env var +# aws_secret_access_key: "" # Or remove and use AWS_SECRET_ACCESS_KEY env var # aws_region: "us-east-1" # MCP Server environment variables From ea3c709bcad0cb698207ffe613104320706636eb Mon Sep 17 00:00:00 2001 From: rholinshead <5060851+rholinshead@users.noreply.github.com> Date: Thu, 23 Oct 2025 01:04:10 -0400 Subject: [PATCH 3/4] Clarify secrets for deployment --- src/mcp_agent/data/templates/README_init.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/mcp_agent/data/templates/README_init.md b/src/mcp_agent/data/templates/README_init.md index 9af14aff6..f48278fa8 100644 --- a/src/mcp_agent/data/templates/README_init.md +++ b/src/mcp_agent/data/templates/README_init.md @@ -17,7 +17,7 @@ Welcome! This project was generated by `mcp-agent init`. It’s a minimal, reada 1. Add your OpenAI API key to `mcp_agent.secrets.yaml` (or set `OPENAI_API_KEY` env var). -NOTE: You can use another supported provider (e.g. Anthropic) instead, just be sure to set its API key in the `mcp_agent.secrets.yaml` and import/use the relevant `AugmentedLLM` in `main.py`. 
+NOTE: You can use another supported provider (e.g. Anthropic) instead, just be sure to set its API key in the `mcp_agent.secrets.yaml` (or set its env var) and import/use the relevant `AugmentedLLM` in `main.py`. 2. Review `mcp_agent.config.yaml`: @@ -53,7 +53,7 @@ npx @modelcontextprotocol/inspector --transport sse --server-url http://127.0.0. 5. Deploy as a remote MCP server: -When you're ready to deploy, simply run: +When you're ready to deploy, ensure the required API keys are set in `mcp_agent.secrets.yaml` and then run: ```bash uv run mcp-agent deploy "hello_world" From 0387462d24399c66ca1d1c6ece86c68a099626aa Mon Sep 17 00:00:00 2001 From: rholinshead <5060851+rholinshead@users.noreply.github.com> Date: Thu, 23 Oct 2025 11:05:44 -0400 Subject: [PATCH 4/4] Update README --- src/mcp_agent/data/templates/README_init.md | 26 ++++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/src/mcp_agent/data/templates/README_init.md b/src/mcp_agent/data/templates/README_init.md index f48278fa8..a424105fb 100644 --- a/src/mcp_agent/data/templates/README_init.md +++ b/src/mcp_agent/data/templates/README_init.md @@ -29,6 +29,7 @@ NOTE: You can use another supported provider (e.g. Anthropic) instead, just be s 3. Run locally: ```bash +uv pip install -r requirements.txt uv run main.py ``` @@ -56,11 +57,30 @@ npx @modelcontextprotocol/inspector --transport sse --server-url http://127.0.0. When you're ready to deploy, ensure the required API keys are set in `mcp_agent.secrets.yaml` and then run: ```bash -uv run mcp-agent deploy "hello_world" +uv run mcp-agent login ``` -- This wraps your app as a hosted MCP SSE server. -- Anything decorated with `@app.tool` (or `@app.async_tool`) runs as a Temporal workflow in the cloud. +to authenticate to mcp-agent cloud. You will be redirected to the login page, create an mcp-agent cloud account through Google or Github. 
+
+Set up your mcp-agent cloud API key and copy and paste it into your terminal:
+
+```bash
+INFO: Directing to MCP Agent Cloud API login...
+Please enter your API key 🔑:
+```
+
+In your terminal, deploy the MCP app:
+
+```bash
+uv run mcp-agent deploy hello_world
+```
+
+You will then be prompted to specify the type of secret to save your OpenAI API key as. Select (1) deployment secret so that it is available to the deployed server.
+
+The `deploy` command will bundle the app files and deploy them, wrapping your app as a hosted MCP SSE server with a URL of the form:
+`https://<your-app-id>.deployments.mcp-agent.com`.
+
+Anything decorated with `@app.tool` (or `@app.async_tool`) runs as a Temporal workflow in the cloud. Since the mcp-agent app is exposed as an MCP server, it can be used in any MCP client just like any other MCP server. For example, you can inspect and test the server using MCP Inspector: