Merged

Changes from 9 commits
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -18,7 +18,7 @@ dependencies = [
"fastapi>=0.115.6",
"httpx>=0.28.1",
"jsonref>=1.1.0",
"mcp>=1.13.1",
"mcp>=1.18.0",
"numpy>=2.1.3",
"opentelemetry-distro>=0.50b0",
"opentelemetry-exporter-otlp-proto-http>=1.29.0",
143 changes: 130 additions & 13 deletions src/mcp_agent/app.py
@@ -2,11 +2,12 @@
import os
import sys
import functools

from types import MethodType
from typing import (
Any,
Dict,
Iterable,
Mapping,
Optional,
Type,
TypeVar,
@@ -20,6 +21,8 @@

from mcp import ServerSession
from mcp.server.fastmcp import FastMCP
from mcp.types import ToolAnnotations, Icon

from mcp_agent.core.context import Context, initialize_context, cleanup_context
from mcp_agent.config import Settings, get_settings
from mcp_agent.executor.signal_registry import SignalRegistry
@@ -586,6 +589,7 @@ def _create_workflow_from_function(
async def _invoke_target(workflow_self, *args, **kwargs):
# Inject app_ctx (AppContext) and shim ctx (FastMCP Context) if requested by the function
import inspect as _inspect
import typing as _typing

call_kwargs = dict(kwargs)

@@ -622,24 +626,51 @@ async def _invoke_target(workflow_self, *args, **kwargs):
except Exception:
pass

# If the function expects a FastMCP Context (ctx/context), ensure it's present (None inside workflow)
# If the function expects a FastMCP Context (ctx/context), ensure it's present.
try:
from mcp.server.fastmcp import Context as _Ctx # type: ignore
except Exception:
_Ctx = None # type: ignore

def _is_fast_ctx_annotation(annotation) -> bool:
if _Ctx is None or annotation is _inspect._empty:
return False
if annotation is _Ctx:
return True
try:
origin = _typing.get_origin(annotation)
if origin is not None:
return any(
_is_fast_ctx_annotation(arg)
for arg in _typing.get_args(annotation)
)
except Exception:
pass
try:
return "fastmcp" in str(annotation)
except Exception:
return False

try:
sig = sig if "sig" in locals() else _inspect.signature(fn)
for p in sig.parameters.values():
if (
p.annotation is not _inspect._empty
and _Ctx is not None
and p.annotation is _Ctx
needs_fast_ctx = False
if _is_fast_ctx_annotation(p.annotation):
needs_fast_ctx = True
elif p.annotation is _inspect._empty and p.name in (
"ctx",
"context",
):
if p.name not in call_kwargs:
call_kwargs[p.name] = None
if p.name in ("ctx", "context") and p.name not in call_kwargs:
call_kwargs[p.name] = None
needs_fast_ctx = True
if needs_fast_ctx and p.name not in call_kwargs:
fast_ctx = getattr(workflow_self, "_mcp_request_context", None)
if fast_ctx is None and app_context_param_name:
fast_ctx = getattr(
call_kwargs.get(app_context_param_name, None),
"fastmcp",
None,
)
call_kwargs[p.name] = fast_ctx
except Exception:
pass
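
(Illustrative sketch, not part of the diff: under the new detection logic above, a parameter annotated with the FastMCP Context -- including inside Optional[...] or union annotations -- or an unannotated ctx/context parameter now receives the active request context instead of always being None inside a workflow. The MCPApp instance and the tool body below are assumptions for illustration only.)

from typing import Optional

from mcp.server.fastmcp import Context
from mcp_agent.app import MCPApp

app = MCPApp(name="example_app")  # hypothetical app instance for illustration

@app.tool
async def summarize(text: str, ctx: Optional[Context] = None) -> str:
    # Optional[Context] is detected via typing.get_origin/get_args, so the
    # workflow shim injects the active FastMCP request context here when one
    # is available (previously ctx was always None inside a workflow).
    if ctx is not None:
        await ctx.info(f"Summarizing {len(text)} characters")
    return text[:200]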

@@ -739,15 +770,23 @@ def tool(
self,
name: str | None = None,
*,
title: str | None = None,
description: str | None = None,
annotations: ToolAnnotations | Mapping[str, Any] | None = None,
icons: Iterable[Icon | Mapping[str, Any]] | None = None,
meta: Mapping[str, Any] | None = None,
structured_output: bool | None = None,
) -> Callable[[Callable[P, R]], Callable[P, R]]: ...

def tool(
self,
name: str | None = None,
*,
title: str | None = None,
description: str | None = None,
annotations: ToolAnnotations | Mapping[str, Any] | None = None,
icons: Iterable[Icon | Mapping[str, Any]] | None = None,
meta: Mapping[str, Any] | None = None,
structured_output: bool | None = None,
):
"""
@@ -766,6 +805,28 @@ def decorator(fn: Callable[P, R]) -> Callable[P, R]:

validate_tool_schema(fn, tool_name)

annotations_obj: ToolAnnotations | None = None
if annotations is not None:
if isinstance(annotations, ToolAnnotations):
annotations_obj = annotations
else:
annotations_obj = ToolAnnotations(**dict(annotations))

icons_list: list[Icon] | None = None
if icons is not None:
icons_list = []
for icon in icons:
if isinstance(icon, Icon):
icons_list.append(icon)
elif isinstance(icon, Mapping):
icons_list.append(Icon(**icon))
else:
raise TypeError("icons entries must be Icon or mapping")

meta_payload: Dict[str, Any] | None = None
if meta is not None:
meta_payload = dict(meta)

# Construct the workflow from function
workflow_cls = self._create_workflow_from_function(
fn,
@@ -784,13 +845,25 @@
"source_fn": fn,
"structured_output": structured_output,
"description": description or (fn.__doc__ or ""),
"title": title,
"annotations": annotations_obj,
"icons": icons_list,
"meta": meta_payload,
Review thread on this line:

Member: Should this be _meta?

Collaborator (Author): Hmm, I looked at the MCP SDK and it looks like the decorator has meta, not _meta. Where did you see _meta?

Member: Ah, _meta is what's used by OpenAI apps -- https://developers.openai.com/apps-sdk/build/mcp-server
Looking at the MCP docs, it seems it may be abused for that reason -- https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#_meta -- since it is supposed to be reserved for MCP itself.

Collaborator (Author): meta on the tool decorator will be set as _meta on the actual tool registration in MCP; see modelcontextprotocol/python-sdk#1463. However, that change needs to land on PyPI first so we can consume it.

}
)

return fn

# Support bare usage: @app.tool without parentheses
if callable(name) and description is None and structured_output is None:
if (
callable(name)
and title is None
and description is None
and annotations is None
and icons is None
and meta is None
and structured_output is None
):
_fn = name # type: ignore[assignment]
name = None
return decorator(_fn) # type: ignore[arg-type]
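
(Usage illustration of the expanded decorator above -- a hedged sketch, not code from this PR: the app instance, tool body, meta keys, and icon fields are assumptions.)

from mcp_agent.app import MCPApp

app = MCPApp(name="example_app")  # hypothetical

@app.tool(
    name="fetch_report",
    title="Fetch Report",
    description="Fetch a report by id.",
    # A ToolAnnotations instance or a plain mapping is accepted; mappings are
    # normalized via ToolAnnotations(**dict(annotations)).
    annotations={"readOnlyHint": True, "openWorldHint": False},
    # Icon objects or mappings; mappings become Icon(**icon).
    icons=[{"src": "https://example.com/icon.png", "mimeType": "image/png"}],
    # Arbitrary metadata, intended to surface as the tool's _meta on
    # registration once the upstream SDK change lands (see the thread above).
    meta={"example/widget": "report-viewer"},
    structured_output=True,
)
async def fetch_report(report_id: str) -> dict:
    return {"id": report_id, "status": "ok"}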
@@ -805,14 +878,24 @@ def async_tool(
self,
name: str | None = None,
*,
title: str | None = None,
description: str | None = None,
annotations: ToolAnnotations | Mapping[str, Any] | None = None,
icons: Iterable[Icon | Mapping[str, Any]] | None = None,
meta: Mapping[str, Any] | None = None,
structured_output: bool | None = None,
) -> Callable[[Callable[P, R]], Callable[P, R]]: ...

def async_tool(
self,
name: str | None = None,
*,
title: str | None = None,
description: str | None = None,
annotations: ToolAnnotations | Mapping[str, Any] | None = None,
icons: Iterable[Icon | Mapping[str, Any]] | None = None,
meta: Mapping[str, Any] | None = None,
structured_output: bool | None = None,
):
"""
Decorator to declare an asynchronous MCP tool.
@@ -830,6 +913,28 @@ def decorator(fn: Callable[P, R]) -> Callable[P, R]:

validate_tool_schema(fn, workflow_name)

annotations_obj: ToolAnnotations | None = None
if annotations is not None:
if isinstance(annotations, ToolAnnotations):
annotations_obj = annotations
else:
annotations_obj = ToolAnnotations(**dict(annotations))

icons_list: list[Icon] | None = None
if icons is not None:
icons_list = []
for icon in icons:
if isinstance(icon, Icon):
icons_list.append(icon)
elif isinstance(icon, Mapping):
icons_list.append(Icon(**icon))
else:
raise TypeError("icons entries must be Icon or mapping")

meta_payload: Dict[str, Any] | None = None
if meta is not None:
meta_payload = dict(meta)

workflow_cls = self._create_workflow_from_function(
fn,
workflow_name=workflow_name,
@@ -844,14 +949,26 @@ def decorator(fn: Callable[P, R]) -> Callable[P, R]:
"workflow_name": workflow_name,
"workflow_cls": workflow_cls,
"source_fn": fn,
"structured_output": None,
"structured_output": structured_output,
"description": description or (fn.__doc__ or ""),
"title": title,
"annotations": annotations_obj,
"icons": icons_list,
"meta": meta_payload,
}
)
return fn

# Support bare usage: @app.async_tool without parentheses
if callable(name) and description is None:
if (
callable(name)
and title is None
and description is None
and annotations is None
and icons is None
and meta is None
and structured_output is None
):
_fn = name # type: ignore[assignment]
name = None
return decorator(_fn) # type: ignore[arg-type]
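
(A corresponding sketch for the async variant, with hypothetical names as above; note the diff also makes async_tool honor the passed structured_output instead of hardcoding None.)

from mcp_agent.app import MCPApp

app = MCPApp(name="example_app")  # hypothetical, as in the previous sketch

@app.async_tool(
    name="generate_summary",
    title="Generate Summary",
    annotations={"readOnlyHint": True},
    structured_output=True,
)
async def generate_summary(document_url: str) -> dict:
    """Long-running summarization exposed as an asynchronous workflow tool."""
    return {"url": document_url, "summary": "..."}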
16 changes: 5 additions & 11 deletions src/mcp_agent/cli/cloud/commands/deploy/main.py
@@ -24,9 +24,7 @@
from mcp_agent.cli.core.utils import run_async
from mcp_agent.cli.exceptions import CLIError
from mcp_agent.cli.mcp_app.api_client import MCPAppClient
from mcp_agent.cli.secrets.processor import (
process_config_secrets,
)
from mcp_agent.cli.secrets import processor as secrets_processor
Review comment on this line:

⚠️ Potential issue | 🔴 Critical

Inconsistent with PR objectives: verify import path.

This import refactor appears unrelated to the PR's stated goal of making Context a derivative of the FastMCP context. The change from a direct function import to a module alias assumes that mcp_agent.cli.secrets exports the processor module.

Run the following script to verify the import path is correct:

🏁 Script executed:

#!/bin/bash
# Verify that mcp_agent.cli.secrets exports the processor module

# Check whether __init__.py exists and what it exports
fd -t f "__init__.py" src/mcp_agent/cli/secrets/ --exec cat {}

# Verify processor.py exists
fd -t f "processor.py" src/mcp_agent/cli/secrets/

Length of output: 339

Critical issue: Import bypasses the package's public API.

The import at line 27 attempts to import processor directly from mcp_agent.cli.secrets, but the package's __init__.py does not export it in __all__. While the processor.py file exists, this import bypasses the package's intended public API.

Update the import to use the explicit module path (from mcp_agent.cli.secrets.processor import process_config_secrets), or add processor to the package's __all__ list in __init__.py.

🤖 Prompt for AI Agents
In src/mcp_agent/cli/cloud/commands/deploy/main.py around line 27, the code imports the submodule via the package public API (importing processor from mcp_agent.cli.secrets), which is not exported in the package __all__; change the import to explicitly import the symbol from the concrete module (import process_config_secrets from mcp_agent.cli.secrets.processor) and update any local references to call that symbol, or alternatively add "processor" to mcp_agent/cli/secrets/__init__.py __all__ so the original import is valid.
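
(A minimal sketch of the second suggested fix, i.e. exporting the submodule from the package; the existing contents of src/mcp_agent/cli/secrets/__init__.py are assumed, not shown in this diff.)

# src/mcp_agent/cli/secrets/__init__.py (sketch)
from . import processor

__all__ = [
    # ...existing exports kept as-is...
    "processor",
]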

from mcp_agent.cli.utils.retry import retry_async_with_exponential_backoff, RetryError
from mcp_agent.cli.utils.ux import (
print_deployment_header,
@@ -173,9 +171,7 @@ def deploy_config(

if app_name is None:
if default_app_name:
print_info(
f"Using app name from config.yaml: '{default_app_name}'"
)
print_info(f"Using app name from config.yaml: '{default_app_name}'")
app_name = default_app_name
else:
app_name = "default"
@@ -205,7 +201,7 @@
" • Or use the --api-key flag with your key",
retriable=False,
)

if settings.VERBOSE:
print_info(f"Using API at {effective_api_url}")

@@ -231,9 +227,7 @@
print_info(f"New app id: `{app_id}`")
else:
short_id = f"{app_id[:8]}…"
print_success(
f"Found existing app '{app_name}' (ID: `{short_id}`)"
)
print_success(f"Found existing app '{app_name}' (ID: `{short_id}`)")
if not non_interactive:
use_existing = typer.confirm(
f"Deploy an update to '{app_name}' (ID: `{short_id}`)?",
@@ -292,7 +286,7 @@ def deploy_config(
secrets_transformed_path = config_dir / MCP_DEPLOYED_SECRETS_FILENAME

run_async(
process_config_secrets(
secrets_processor.process_config_secrets(
input_path=secrets_file,
output_path=secrets_transformed_path,
api_url=effective_api_url,
4 changes: 3 additions & 1 deletion src/mcp_agent/cli/cloud/commands/deploy/wrangler_wrapper.py
@@ -296,7 +296,9 @@ def ignore_patterns(path_str, names):
)
meta_vars.update({"MCP_DEPLOY_WORKSPACE_HASH": bundle_hash})
if settings.VERBOSE:
print_info(f"Deploying from non-git workspace (hash {bundle_hash[:12]}…)")
print_info(
f"Deploying from non-git workspace (hash {bundle_hash[:12]}…)"
)

# Write a breadcrumb file into the project so it ships with the bundle.
# Use a Python file for guaranteed inclusion without renaming.