17 changes: 17 additions & 0 deletions src/mcp_agent/workflows/llm/augmented_llm.py
@@ -20,6 +20,7 @@
CallToolResult,
CreateMessageRequestParams,
CreateMessageResult,
ListToolsResult,
SamplingMessage,
TextContent,
PromptMessage,
@@ -679,3 +680,19 @@ def _gen_name(self, name: str | None, prefix: str | None) -> str:
identifier = str(self.context.executor.uuid())

return f"{prefix}-{identifier}"

# --- Agent convenience proxies -------------------------------------------------
async def list_tools(self, server_name: str | None = None) -> ListToolsResult:
"""Proxy to the underlying agent's list_tools for a simpler API."""
return await self.agent.list_tools(server_name=server_name)

async def close(self):
"""Close underlying agent connections."""
await self.agent.close()

async def __aenter__(self):
await self.agent.__aenter__()
return self

async def __aexit__(self, exc_type, exc_val, exc_tb):
await self.agent.__aexit__(exc_type, exc_val, exc_tb)
Comment on lines +697 to +698
⚠️ Potential issue

Propagate the underlying __aexit__ return value

Agent.__aexit__() may return True to suppress an exception; dropping that value changes behaviour to “always re-raise”. Forward the result explicitly:

-    async def __aexit__(self, exc_type, exc_val, exc_tb):
-        await self.agent.__aexit__(exc_type, exc_val, exc_tb)
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        return await self.agent.__aexit__(exc_type, exc_val, exc_tb)
🤖 Prompt for AI Agents
In src/mcp_agent/workflows/llm/augmented_llm.py around lines 697 to 698, the
async __aexit__ method calls self.agent.__aexit__ but does not return its
result. Modify the method to return the awaited result of self.agent.__aexit__
so that any True value indicating exception suppression is properly propagated.
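For context on why the return value matters, here is a minimal sketch (hypothetical toy classes, not from this PR) of how an async context manager suppresses an exception by returning True from __aexit__, and how a wrapper that drops that value re-raises instead:

import asyncio


class SuppressingAgent:
    """A toy async context manager that suppresses ValueError."""

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        # Returning True tells Python to swallow the exception.
        return exc_type is ValueError


class DroppingWrapper:
    """A wrapper that forgets to forward __aexit__'s return value."""

    def __init__(self, agent):
        self.agent = agent

    async def __aenter__(self):
        await self.agent.__aenter__()
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        # Implicit None return is falsy, so the exception always re-raises.
        await self.agent.__aexit__(exc_type, exc_val, exc_tb)


async def main():
    async with SuppressingAgent():
        raise ValueError("swallowed")  # suppressed by the inner manager

    try:
        async with DroppingWrapper(SuppressingAgent()):
            raise ValueError("escapes")
    except ValueError as exc:
        print(f"leaked despite inner suppression: {exc}")


asyncio.run(main())

Forwarding the inner result keeps async with llm: blocks behaving exactly like async with agent: blocks, which is the point of these convenience proxies.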

90 changes: 81 additions & 9 deletions src/mcp_agent/workflows/router/router_llm.py
@@ -1,12 +1,12 @@
from typing import Callable, List, Literal, Optional, TYPE_CHECKING
from typing import Any, Callable, List, Literal, Optional, TYPE_CHECKING

from opentelemetry import trace
from pydantic import BaseModel

from mcp_agent.agents.agent import Agent
from mcp_agent.tracing.semconv import GEN_AI_REQUEST_TOP_K
from mcp_agent.tracing.telemetry import get_tracer
from mcp_agent.workflows.llm.augmented_llm import AugmentedLLM
from mcp_agent.workflows.llm.augmented_llm import AugmentedLLM, RequestParams
from mcp_agent.workflows.router.router_base import ResultT, Router, RouterResult
from mcp_agent.logging.logger import get_logger

@@ -79,9 +79,14 @@ class StructuredResponse(BaseModel):
"""A list of categories to route the input to."""


class LLMRouter(Router):
class LLMRouter(AugmentedLLM[Any, Any], Router):
"""
A router that uses an LLM to route an input to a specific category.
A router workflow that also behaves like an AugmentedLLM.

- As a Router: provides route/route_to_* APIs that return routing targets.
- As an AugmentedLLM: generate/generate_str/generate_structured delegate to routing
and return the routing outputs in unstructured or structured forms, enabling
composition with other AugmentedLLM-based workflows (Parallel, Evaluator/Optimizer, etc.).
"""

def __init__(
@@ -94,7 +99,9 @@ def __init__(
context: Optional["Context"] = None,
**kwargs,
):
super().__init__(
# Initialize Router side (category discovery, etc.)
Router.__init__(
self,
server_names=server_names,
agents=agents,
functions=functions,
Expand All @@ -103,7 +110,21 @@ def __init__(
**kwargs,
)

self.llm = llm
# Initialize the AugmentedLLM side for workflow composition.
# This class never calls a provider itself; it delegates to the provided
# classifier LLM, but initializing gives the router a uniform name and tracing.
AugmentedLLM.__init__(
self,
name=(f"{llm.name}-router" if getattr(llm, "name", None) else None),
instruction="You are a router workflow that returns categories.",
context=context,
**kwargs,
)

# Inner LLM that makes the routing decision
self.classifier_llm: AugmentedLLM = llm
# Back-compat alias
self.llm: AugmentedLLM = llm

@classmethod
async def create(
@@ -248,8 +269,8 @@ async def _route_with_llm(
context=context, request=request, top_k=top_k
)

# Get routes from LLM
response = await self.llm.generate_structured(
# Get routes from the inner/classifier LLM
response = await self.classifier_llm.generate_structured(
message=prompt,
response_model=StructuredResponse,
)
@@ -312,7 +333,8 @@ def _annotate_span_for_route_request(
return
span.set_attribute("request", request)
span.set_attribute(GEN_AI_REQUEST_TOP_K, top_k)
span.set_attribute("llm", self.llm.name)
if getattr(self.classifier_llm, "name", None):
span.set_attribute("llm", self.classifier_llm.name)
span.set_attribute(
"agents", [a.name for a in self.agents] if self.agents else []
)
@@ -372,3 +394,53 @@ def _generate_context(
idx += 1

return "\n\n".join(context_list)

# --- AugmentedLLM interface -------------------------------------------------
async def generate(
self,
message: str | Any | List[Any],
request_params: RequestParams | None = None,
) -> List[Any]:
"""Return routing results as a list for composition with other workflows.

The return value is a list of dicts: [{"category": name, "confidence": str, "reasoning": str?}]
"""
results = await self._route_with_llm(str(message), top_k=5)
payload = [
{
"category": (
r.result
if isinstance(r.result, str)
else (
r.result.name
if isinstance(r.result, Agent)
else getattr(r.result, "__name__", str(r.result))
)
),
"confidence": r.confidence,
"reasoning": r.reasoning,
}
for r in results
]
return payload # type: ignore[return-value]

async def generate_str(
self,
message: str | Any | List[Any],
request_params: RequestParams | None = None,
) -> str:
"""Return routing results as JSON string."""
import json

payload = await self.generate(message=message, request_params=request_params)
return json.dumps({"categories": payload})

async def generate_structured(
self,
message: str | Any | List[Any],
response_model: type[StructuredResponse],
request_params: RequestParams | None = None,
) -> StructuredResponse:
"""Return routing results as a StructuredResponse Pydantic model."""
txt = await self.generate_str(message=message, request_params=request_params)
return response_model.model_validate_json(txt)
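To make the dual interface concrete, a rough usage sketch follows. The agent names, the classifier construction, and the exact create() keywords are assumptions based on the signatures in this diff, not committed code:

import json

from mcp_agent.agents.agent import Agent
from mcp_agent.workflows.llm.augmented_llm_openai import OpenAIAugmentedLLM
from mcp_agent.workflows.router.router_llm import LLMRouter


async def route_example(context):
    # Hypothetical agents standing in for real routing categories.
    finder = Agent(name="finder", server_names=["fetch"])
    writer = Agent(name="writer", server_names=["filesystem"])

    router = await LLMRouter.create(
        llm=OpenAIAugmentedLLM(name="classifier", context=context),
        agents=[finder, writer],
        context=context,
    )

    # As a Router: returns routing targets directly.
    targets = await router.route("Fetch the README and summarize it", top_k=1)

    # As an AugmentedLLM: the same decision serialized as JSON, so other
    # AugmentedLLM-based workflows (Parallel, Evaluator/Optimizer) can consume it.
    text = await router.generate_str("Fetch the README and summarize it")
    categories = json.loads(text)["categories"]
    return targets, categories

Since generate_str returns json.dumps({"categories": [...]}), downstream workflows that expect plain strings keep working without special-casing the router.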
2 changes: 2 additions & 0 deletions src/mcp_agent/workflows/router/router_llm_anthropic.py
@@ -1,6 +1,7 @@
from typing import Callable, List, Optional, TYPE_CHECKING

from mcp_agent.agents.agent import Agent
from mcp_agent.workflows.llm.augmented_llm import AugmentedLLM
from mcp_agent.workflows.llm.augmented_llm_anthropic import AnthropicAugmentedLLM
from mcp_agent.workflows.router.router_llm import LLMRouter

@@ -49,6 +50,7 @@ def __init__(
@classmethod
async def create(
cls,
llm: AugmentedLLM | None = None,
server_names: List[str] | None = None,
Comment on lines +53 to 54

🛠️ Refactor suggestion

llm parameter is unused

create() now accepts llm: AugmentedLLM | None but doesn’t pass it to cls(...) or store it, so callers cannot inject a custom classifier LLM. Either:

  1. Wire it through (cls(llm=llm, …)) and add llm to __init__ (see the sketch after this comment), or
  2. Drop the parameter until it’s needed.

Leaving it unused will trigger linters and confuse API consumers.

🤖 Prompt for AI Agents
In src/mcp_agent/workflows/router/router_llm_anthropic.py around lines 53 to 54,
the create() method has an llm parameter that is not used or passed to the class
constructor, causing confusion and linter warnings. To fix this, either pass llm
to cls(...) in create() and add llm as an __init__ parameter to store it
properly, or remove the llm parameter from create() until it is actually needed.
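A minimal sketch of option 1, assuming create() mirrors __init__, that __init__ is extended to accept llm, and that the Router base exposes an async initialize(). Illustrative only, not the committed fix; the same change applies to the OpenAI router below:

@classmethod
async def create(
    cls,
    llm: AugmentedLLM | None = None,
    server_names: List[str] | None = None,
    agents: List[Agent] | None = None,
    functions: List[Callable] | None = None,
    context: Optional["Context"] = None,
):
    # Fall back to the provider default when no classifier is injected.
    # Assumes __init__ is extended to accept and forward `llm`.
    router = cls(
        llm=llm or AnthropicAugmentedLLM(context=context),
        server_names=server_names,
        agents=agents,
        functions=functions,
        context=context,
    )
    await router.initialize()  # assumed: async initialize() from the Router base
    return router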

agents: List[Agent] | None = None,
functions: List[Callable] | None = None,
2 changes: 2 additions & 0 deletions src/mcp_agent/workflows/router/router_llm_openai.py
@@ -1,6 +1,7 @@
from typing import Callable, List, Optional, TYPE_CHECKING

from mcp_agent.agents.agent import Agent
from mcp_agent.workflows.llm.augmented_llm import AugmentedLLM
from mcp_agent.workflows.llm.augmented_llm_openai import OpenAIAugmentedLLM
from mcp_agent.workflows.router.router_llm import LLMRouter

@@ -49,6 +50,7 @@ def __init__(
@classmethod
async def create(
cls,
llm: AugmentedLLM | None = None,
server_names: List[str] | None = None,
Comment on lines +53 to 54

🛠️ Refactor suggestion

llm parameter currently serves no purpose

The optional llm argument is accepted but ignored. Align implementation with intent (thread it into the instance or remove it) to avoid dead parameters and static-analysis warnings.

🤖 Prompt for AI Agents
In src/mcp_agent/workflows/router/router_llm_openai.py around lines 53 to 54,
the llm parameter is accepted but not used anywhere in the code, causing dead
code and static-analysis warnings. To fix this, either remove the llm parameter
entirely if it is not needed, or if it is intended to be used, assign it to an
instance attribute or otherwise integrate it into the class or function logic so
it serves a purpose.

agents: List[Agent] | None = None,
functions: List[Callable] | None = None,