-
Notifications
You must be signed in to change notification settings - Fork 772
Query temporal directly for workflow runs #471
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 1 commit
dc2abb4
084cb16
a09f09b
32dd041
445210c
6d13d24
4f06ad5
b0facff
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change | ||||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| @@ -1,5 +1,5 @@ | ||||||||||||||
| import asyncio | ||||||||||||||
|
|
||||||||||||||
| import base64 | ||||||||||||||
| from typing import ( | ||||||||||||||
| Any, | ||||||||||||||
| Dict, | ||||||||||||||
|
|
@@ -9,7 +9,7 @@ | |||||||||||||
| ) | ||||||||||||||
|
|
||||||||||||||
| from mcp_agent.logging.logger import get_logger | ||||||||||||||
| from mcp_agent.executor.workflow_registry import WorkflowRegistry | ||||||||||||||
| from mcp_agent.executor.workflow_registry import WorkflowRegistry, WorkflowRunsPage | ||||||||||||||
|
|
||||||||||||||
| if TYPE_CHECKING: | ||||||||||||||
| from mcp_agent.executor.temporal import TemporalExecutor | ||||||||||||||
|
|
@@ -216,23 +216,140 @@ async def get_workflow_status( | |||||||||||||
|
|
||||||||||||||
| return status_dict | ||||||||||||||
|
|
||||||||||||||
| async def list_workflow_statuses(self) -> List[Dict[str, Any]]: | ||||||||||||||
| result = [] | ||||||||||||||
| for run_id, workflow in self._local_workflows.items(): | ||||||||||||||
| # Get the workflow status directly to have consistent behavior | ||||||||||||||
| status = await workflow.get_status() | ||||||||||||||
| workflow_id = workflow.id or workflow.name | ||||||||||||||
| async def list_workflow_statuses( | ||||||||||||||
| self, | ||||||||||||||
| *, | ||||||||||||||
| query: str | None = None, | ||||||||||||||
| limit: int | None = None, | ||||||||||||||
| page_size: int | None = None, | ||||||||||||||
| next_page_token: bytes | None = None, | ||||||||||||||
| rpc_metadata: Dict[str, str] | None = None, | ||||||||||||||
| rpc_timeout: Any | None = None, | ||||||||||||||
| ) -> List[Dict[str, Any]] | WorkflowRunsPage: | ||||||||||||||
| """ | ||||||||||||||
| List workflow runs by querying Temporal visibility (preferred). | ||||||||||||||
|
|
||||||||||||||
| # Query Temporal for the status | ||||||||||||||
| temporal_status = await self._get_temporal_workflow_status( | ||||||||||||||
| workflow_id=workflow_id, run_id=run_id | ||||||||||||||
| ) | ||||||||||||||
| - When Temporal listing succeeds, only runs returned by Temporal are included; local | ||||||||||||||
| cache is used to enrich entries where possible. | ||||||||||||||
| - On failure or when listing is unsupported, fall back to locally tracked runs. | ||||||||||||||
|
|
||||||||||||||
| status["temporal"] = temporal_status | ||||||||||||||
| Args: | ||||||||||||||
| query: Optional Temporal visibility list filter; defaults to newest first when unset. | ||||||||||||||
| limit: Maximum number of runs to return; enforced locally if backend doesn't apply it. | ||||||||||||||
| page_size: Page size to request from Temporal, if supported by SDK version. | ||||||||||||||
| next_page_token: Opaque pagination token from prior call, if supported by SDK version. | ||||||||||||||
| rpc_metadata: Optional per-RPC headers for Temporal (not exposed via server tool). | ||||||||||||||
| rpc_timeout: Optional per-RPC timeout (not exposed via server tool). | ||||||||||||||
|
|
||||||||||||||
| result.append(status) | ||||||||||||||
| Returns: | ||||||||||||||
| A list of dictionaries with workflow information, or a WorkflowRunsPage object. | ||||||||||||||
| """ | ||||||||||||||
| results: List[Dict[str, Any]] = [] | ||||||||||||||
|
|
||||||||||||||
| return result | ||||||||||||||
| # Collect all executions for this task queue (best effort) | ||||||||||||||
| try: | ||||||||||||||
| await self._executor.ensure_client() | ||||||||||||||
| client = self._executor.client | ||||||||||||||
|
|
||||||||||||||
| # Use caller query if provided; else default to newest first | ||||||||||||||
| query_local = query or "order by StartTime desc" | ||||||||||||||
|
|
||||||||||||||
| iterator = client.list_workflows( | ||||||||||||||
| query=query_local, | ||||||||||||||
| limit=limit, | ||||||||||||||
| page_size=page_size, | ||||||||||||||
| next_page_token=next_page_token, | ||||||||||||||
| rpc_metadata=rpc_metadata, | ||||||||||||||
| rpc_timeout=rpc_timeout, | ||||||||||||||
| ) | ||||||||||||||
|
|
||||||||||||||
| # Build quick lookup from local cache by (workflow_id, run_id) | ||||||||||||||
| in_memory_workflows: Dict[tuple[str, str], "Workflow"] = {} | ||||||||||||||
| for run_id, wf in self._local_workflows.items(): | ||||||||||||||
| workflow_id = wf.id or wf.name | ||||||||||||||
| if workflow_id and run_id: | ||||||||||||||
| in_memory_workflows[(workflow_id, run_id)] = wf | ||||||||||||||
|
|
||||||||||||||
| count = 0 | ||||||||||||||
| max_count = limit if isinstance(limit, int) and limit > 0 else None | ||||||||||||||
|
|
||||||||||||||
| async for workflow_info in iterator: | ||||||||||||||
| # Extract workflow_id and run_id robustly from various shapes | ||||||||||||||
| workflow_id = workflow_info.id | ||||||||||||||
| run_id = workflow_info.run_id | ||||||||||||||
|
|
||||||||||||||
| if not workflow_id or not run_id: | ||||||||||||||
| # Can't build a handle without both IDs | ||||||||||||||
| continue | ||||||||||||||
|
|
||||||||||||||
| # If we have a local workflow, start with its detailed status | ||||||||||||||
| wf = in_memory_workflows.get((workflow_id, run_id)) | ||||||||||||||
| if wf is not None: | ||||||||||||||
| status_dict = await wf.get_status() | ||||||||||||||
| else: | ||||||||||||||
| # Create a minimal status when not tracked locally | ||||||||||||||
| status_dict = { | ||||||||||||||
| "id": run_id, | ||||||||||||||
| "workflow_id": workflow_id, | ||||||||||||||
| "run_id": run_id, | ||||||||||||||
| "name": workflow_info.workflow_type or workflow_id, | ||||||||||||||
| "status": "unknown", | ||||||||||||||
| "running": False, | ||||||||||||||
| "state": {"status": "unknown", "metadata": {}, "error": None}, | ||||||||||||||
| } | ||||||||||||||
|
|
||||||||||||||
| # Merge Temporal visibility/describe details | ||||||||||||||
| temporal_status = await self._get_temporal_workflow_status( | ||||||||||||||
| workflow_id=workflow_id, run_id=run_id | ||||||||||||||
| ) | ||||||||||||||
|
|
||||||||||||||
| status_dict["temporal"] = temporal_status | ||||||||||||||
|
|
||||||||||||||
| # Try to reflect Temporal status into top-level summary | ||||||||||||||
| try: | ||||||||||||||
| ts = ( | ||||||||||||||
| temporal_status.get("status") | ||||||||||||||
| if isinstance(temporal_status, dict) | ||||||||||||||
| else None | ||||||||||||||
| ) | ||||||||||||||
| if isinstance(ts, str): | ||||||||||||||
| status_dict["status"] = ts.lower() | ||||||||||||||
| status_dict["running"] = ts.upper() in {"RUNNING", "OPEN"} | ||||||||||||||
| except Exception: | ||||||||||||||
| pass | ||||||||||||||
|
|
||||||||||||||
coderabbitai[bot] marked this conversation as resolved.
Outdated
Show resolved
Hide resolved
|
||||||||||||||
| results.append(status_dict) | ||||||||||||||
| count += 1 | ||||||||||||||
| if max_count is not None and count >= max_count: | ||||||||||||||
| break | ||||||||||||||
|
|
||||||||||||||
| if iterator.next_page_token: | ||||||||||||||
| return WorkflowRunsPage( | ||||||||||||||
| runs=results, | ||||||||||||||
| next_page_token=base64.b64encode(iterator.next_page_token).decode( | ||||||||||||||
| "ascii" | ||||||||||||||
| ), | ||||||||||||||
|
||||||||||||||
| next_page_token=base64.b64encode(iterator.next_page_token).decode( | |
| "ascii" | |
| ), | |
| next_page_token=base64.b64encode( | |
| iterator.next_page_token.encode('utf-8') if isinstance(iterator.next_page_token, str) else iterator.next_page_token | |
| ).decode("ascii"), |
Spotted by Diamond
Is this helpful? React 👍 or 👎 to let us know.
| Original file line number | Diff line number | Diff line change | ||||||||
|---|---|---|---|---|---|---|---|---|---|---|
|
|
@@ -22,8 +22,9 @@ | |||||||||
| from mcp_agent.core.context_dependent import ContextDependent | ||||||||||
| from mcp_agent.executor.workflow import Workflow | ||||||||||
| from mcp_agent.executor.workflow_registry import ( | ||||||||||
| WorkflowRegistry, | ||||||||||
| InMemoryWorkflowRegistry, | ||||||||||
| WorkflowRegistry, | ||||||||||
| WorkflowRunsPage, | ||||||||||
| ) | ||||||||||
|
|
||||||||||
| from mcp_agent.logging.logger import get_logger | ||||||||||
|
|
@@ -1043,16 +1044,27 @@ def list_workflows(ctx: MCPContext) -> Dict[str, Dict[str, Any]]: | |||||||||
| return result | ||||||||||
|
|
||||||||||
| @mcp.tool(name="workflows-runs-list") | ||||||||||
| async def list_workflow_runs(ctx: MCPContext) -> List[Dict[str, Any]]: | ||||||||||
| async def list_workflow_runs( | ||||||||||
| ctx: MCPContext, | ||||||||||
| limit: int = 100, | ||||||||||
| page_size: int | None = 100, | ||||||||||
| next_page_token: str | None = None, | ||||||||||
| ) -> List[Dict[str, Any]] | WorkflowRunsPage: | ||||||||||
| """ | ||||||||||
| List all workflow instances (runs) with their detailed status information. | ||||||||||
|
|
||||||||||
| This returns information about actual workflow instances (runs), not workflow types. | ||||||||||
| For each running workflow, returns its ID, name, current state, and available operations. | ||||||||||
| This helps in identifying and managing active workflow instances. | ||||||||||
|
|
||||||||||
|
|
||||||||||
| Args: | ||||||||||
| limit: Maximum number of runs to return. Default: 100. | ||||||||||
| page_size: Page size for paginated backends. Default: 100. | ||||||||||
| next_page_token: Optional Base64-encoded token for pagination resume. Only provide if you received a next_page_token from a previous call. | ||||||||||
|
|
||||||||||
| Returns: | ||||||||||
| A dictionary mapping workflow instance IDs to their detailed status information. | ||||||||||
| A list of workflow run status dictionaries, or a WorkflowRunsPage containing the runs plus a Base64-encoded next_page_token when more results are available. | ||||||||||
| """ | ||||||||||
| # Ensure upstream session is set for any logs emitted during this call | ||||||||||
| try: | ||||||||||
|
|
@@ -1066,10 +1078,26 @@ async def list_workflow_runs(ctx: MCPContext) -> List[Dict[str, Any]]: | |||||||||
| if server_context is None or not hasattr(server_context, "workflow_registry"): | ||||||||||
| raise ToolError("Server context not available for MCPApp Server.") | ||||||||||
|
|
||||||||||
| # Get all workflow statuses from the registry | ||||||||||
| # Decode next_page_token if provided (base64-encoded string -> bytes) | ||||||||||
| token_bytes = None | ||||||||||
| if next_page_token: | ||||||||||
| try: | ||||||||||
| import base64 as _b64 | ||||||||||
|
|
||||||||||
| token_bytes = _b64.b64decode(next_page_token) | ||||||||||
| except Exception: | ||||||||||
| token_bytes = None | ||||||||||
|
|
||||||||||
| # Get workflow statuses from the registry with pagination/query hints | ||||||||||
| workflow_statuses = ( | ||||||||||
| await server_context.workflow_registry.list_workflow_statuses() | ||||||||||
| await server_context.workflow_registry.list_workflow_statuses( | ||||||||||
| query=None, | ||||||||||
| limit=limit, | ||||||||||
| page_size=page_size, | ||||||||||
| next_page_token=token_bytes, | ||||||||||
| ) | ||||||||||
| ) | ||||||||||
|
|
||||||||||
| return workflow_statuses | ||||||||||
|
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Ensure JSON-serializable return (Pydantic to dict). FastMCP tools should return JSON-serializable data. If the registry returns a WorkflowRunsPage (Pydantic), convert it to a dict before returning. - return workflow_statuses
+ if isinstance(workflow_statuses, WorkflowRunsPage):
+ return workflow_statuses.model_dump(by_alias=True, mode="json", exclude_none=True)
+ return workflow_statuses📝 Committable suggestion
Suggested change
🤖 Prompt for AI Agents |
||||||||||
|
|
||||||||||
| @mcp.tool(name="workflows-run") | ||||||||||
|
|
||||||||||
Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Just noting for later -- when we push support for multiple users using a server, we'll need to add the auth checks in the proxy layer for this request, which will have implications for results returned (e.g. we'll only return those that the user has permission to see)
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Good point, let me mark that as a TODO comment in the code as well. I think this is reasonable for the time being but agreed we'll want this to be user-scoped in the future.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Fwiw, this client code shouldn't really need to know/care about it, as long as our rpc metadata passes through the api key. Our backend proxy layer will handle the auth checks, but will need to handle pagination and such with some in-memory processing