diff --git a/README.md b/README.md
index 48d1c1742..39769e4bf 100644
--- a/README.md
+++ b/README.md
@@ -487,6 +487,188 @@ def get_temperature(city: str) -> float:
 
 _Full example: [examples/snippets/servers/structured_output.py](https://github.com/modelcontextprotocol/python-sdk/blob/main/examples/snippets/servers/structured_output.py)_
 
+#### Async Tools
+
+Tools can be configured to run asynchronously, allowing for long-running operations that execute in the background while clients poll for status and results. Async tools currently require protocol version `next` and support operation tokens for tracking execution state.
+
+Tools can specify their invocation mode: `sync` (default), `async`, or `["sync", "async"]` for hybrid tools that support both patterns. Async tools can provide immediate feedback while continuing to execute, and support a configurable keep-alive duration for result availability.
+
+```python
+"""
+Basic async tool example.
+
+cd to the `examples/snippets` directory and run:
+    uv run server async_tool_basic stdio
+"""
+
+import anyio
+
+from mcp.server.fastmcp import Context, FastMCP
+from mcp.server.session import ServerSession
+
+mcp = FastMCP("Async Tool Basic")
+
+
+@mcp.tool(invocation_modes=["async"])
+async def analyze_data(dataset: str, ctx: Context[ServerSession, None]) -> str:
+    """Analyze a dataset asynchronously with progress updates."""
+    await ctx.info(f"Starting analysis of {dataset}")
+
+    # Simulate analysis with progress updates
+    for i in range(5):
+        await anyio.sleep(0.5)
+        progress = (i + 1) / 5
+        await ctx.report_progress(progress, 1.0, f"Processing step {i + 1}/5")
+
+    await ctx.info("Analysis complete")
+    return f"Analysis results for {dataset}: 95% accuracy achieved"
+
+
+@mcp.tool(invocation_modes=["sync", "async"])
+async def process_text(text: str, ctx: Context[ServerSession, None]) -> str:
+    """Process text in sync or async mode."""
+    await ctx.info(f"Processing text: {text[:20]}...")
+    await anyio.sleep(0.3)
+
+    return f"Processed: {text.upper()}"
+
+
+@mcp.tool()
+async def process_text_sync(text: str, ctx: Context[ServerSession, None]) -> str:
+    """Process text in sync mode only."""
+    await ctx.info(f"Processing text: {text[:20]}...")
+    await anyio.sleep(0.3)
+
+    return f"Processed: {text.upper()}"
+
+
+if __name__ == "__main__":
+    mcp.run()
+```
+
+_Full example: [examples/snippets/servers/async_tool_basic.py](https://github.com/modelcontextprotocol/python-sdk/blob/main/examples/snippets/servers/async_tool_basic.py)_
+
+Tools can also provide immediate feedback while continuing to execute asynchronously:
+
+```python
+"""
+Async tool with immediate result example.
+
+cd to the `examples/snippets` directory and run:
+    uv run server async_tool_immediate stdio
+"""
+
+import anyio
+
+from mcp import types
+from mcp.server.fastmcp import Context, FastMCP
+from mcp.server.session import ServerSession
+
+mcp = FastMCP("Async Tool Immediate")
+
+
+async def provide_immediate_feedback(operation: str) -> list[types.ContentBlock]:
+    """Provide immediate feedback while the async operation starts."""
+    return [types.TextContent(type="text", text=f"Starting {operation} operation. This will take a moment.")]
+
+
+@mcp.tool(invocation_modes=["async"], immediate_result=provide_immediate_feedback)
+async def long_analysis(operation: str, ctx: Context[ServerSession, None]) -> str:
+    """Perform long-running analysis with immediate user feedback."""
+    await ctx.info(f"Beginning {operation} analysis")
+
+    # Simulate long-running work
+    for i in range(4):
+        await anyio.sleep(1)
+        progress = (i + 1) / 4
+        await ctx.report_progress(progress, 1.0, f"Analysis step {i + 1}/4")
+
+    return f"Analysis '{operation}' completed with detailed results"
+
+
+if __name__ == "__main__":
+    mcp.run()
+```
+
+_Full example: [examples/snippets/servers/async_tool_immediate.py](https://github.com/modelcontextprotocol/python-sdk/blob/main/examples/snippets/servers/async_tool_immediate.py)_
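+
+From the caller's perspective, the immediate feedback and the final result arrive separately. A minimal sketch of that split (the helper name and arguments here are ours; it assumes the immediate content is surfaced as the `content` of the initial `CallToolResult`, while the final value is fetched later via the operation token, as in the client example below):
+
+```python
+from mcp import ClientSession, types
+
+
+async def show_immediate_feedback(session: ClientSession) -> str | None:
+    """Print the immediate feedback, then hand back the token for polling."""
+    result = await session.call_tool("long_analysis", arguments={"operation": "trend"})
+
+    # Immediate feedback produced by provide_immediate_feedback (assumed to be
+    # delivered as the content of the initial CallToolResult)
+    for block in result.content:
+        if isinstance(block, types.TextContent):
+            print(block.text)
+
+    # The detailed return value is still retrieved by polling with this token
+    return result.operation.token if result.operation else None
+```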
+
+Clients using protocol version `next` can interact with async tools by polling operation status and retrieving results:
+
+```python
+"""
+Client example for async tools.
+
+cd to the `examples/snippets` directory and run:
+    uv run async-tool-client
+"""
+
+import os
+
+import anyio
+
+from mcp import ClientSession, StdioServerParameters, types
+from mcp.client.stdio import stdio_client
+
+# Server parameters for async tool example
+server_params = StdioServerParameters(
+    command="uv",
+    args=["run", "server", "async_tool_basic", "stdio"],
+    env={"UV_INDEX": os.environ.get("UV_INDEX", "")},
+)
+
+
+async def call_async_tool(session: ClientSession):
+    """Demonstrate calling an async tool."""
+    print("Calling async tool...")
+
+    result = await session.call_tool("analyze_data", arguments={"dataset": "customer_data.csv"})
+
+    if result.operation:
+        token = result.operation.token
+        print(f"Operation started with token: {token}")
+
+        # Poll for completion
+        while True:
+            status = await session.get_operation_status(token)
+            print(f"Status: {status.status}")
+
+            if status.status == "completed":
+                final_result = await session.get_operation_result(token)
+                for content in final_result.result.content:
+                    if isinstance(content, types.TextContent):
+                        print(f"Result: {content.text}")
+                break
+            elif status.status == "failed":
+                print(f"Operation failed: {status.error}")
+                break
+
+            await anyio.sleep(0.5)
+
+
+async def run():
+    """Run the async tool client example."""
+    async with stdio_client(server_params) as (read, write):
+        async with ClientSession(read, write, protocol_version="next") as session:
+            await session.initialize()
+            await call_async_tool(session)
+
+
+if __name__ == "__main__":
+    anyio.run(run)
+```
+
+_Full example: [examples/snippets/clients/async_tool_client.py](https://github.com/modelcontextprotocol/python-sdk/blob/main/examples/snippets/clients/async_tool_client.py)_
+
+The `@mcp.tool()` decorator accepts `invocation_modes` to specify supported execution patterns, `immediate_result` to provide instant feedback for async tools, and `keep_alive` to set how long operation results remain available (default: 300 seconds).
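+
+For instance, an async tool whose results should stay retrievable for ten minutes could raise the keep-alive window. A short sketch (the tool itself is hypothetical; `keep_alive` is given in seconds, per the default above):
+
+```python
+from mcp.server.fastmcp import FastMCP
+
+mcp = FastMCP("Keep Alive Example")
+
+
+@mcp.tool(invocation_modes=["async"], keep_alive=600)
+async def export_report(name: str) -> str:
+    """Generate a report whose result stays retrievable for ten minutes."""
+    return f"Report '{name}' ready"
+```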
+
 ### Prompts
 
 Prompts are reusable templates that help LLMs interact with your server effectively:
@@ -1479,6 +1661,40 @@ For more information on mounting applications in Starlette, see the [Starlette d
 
 ## Advanced Usage
 
+### Persistent Async Operations
+
+For production deployments, you may want async operations to survive server restarts. The `ServerAsyncOperationManager` uses pluggable `AsyncOperationStore` and `AsyncOperationBroker` components to handle operation persistence and task queuing.
+
+#### Operation Lifecycle
+
+Async operations follow this lifecycle:
+
+1. **Submitted** - Operation token generated and stored
+2. **Working** - Task begins execution
+3. **Completed/Failed/Cancelled** - Operation reaches terminal state with results
+
+#### Custom Store and Broker
+
+```python
+from mcp.server.fastmcp import FastMCP
+from mcp.shared.async_operations import ServerAsyncOperationManager
+
+# Create custom store and broker implementations
+custom_store = MyAsyncOperationStore()
+custom_broker = MyAsyncOperationBroker()
+
+# Create operation manager with custom components
+operation_manager = ServerAsyncOperationManager(
+    store=custom_store,
+    broker=custom_broker
+)
+
+# Use with FastMCP
+mcp = FastMCP("My Server", async_operations=operation_manager)
+```
+
+For a complete SQLite-based implementation example, see [`examples/servers/sqlite-async-operations/`](examples/servers/sqlite-async-operations/).
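+
+Whatever backend you choose, the store has to persist each operation's token, lifecycle state, result, and keep-alive deadline so they survive a restart. The sketch below is not the SDK's `AsyncOperationStore` interface; it only illustrates, with an in-memory stand-in, the record keeping a persistent backend performs (names and fields here are assumptions for illustration, following the lifecycle states listed above):
+
+```python
+import time
+from dataclasses import dataclass, field
+
+
+@dataclass
+class OperationRecord:
+    """The state a persistent backend must retain across restarts."""
+
+    token: str
+    status: str = "submitted"  # submitted -> working -> completed/failed/cancelled
+    result: str | None = None
+    expires_at: float = field(default_factory=lambda: time.time() + 300)  # keep_alive default
+
+
+class RecordKeeper:
+    """Dict-backed stand-in; a real store would write to SQLite, Redis, etc."""
+
+    def __init__(self) -> None:
+        self._records: dict[str, OperationRecord] = {}
+
+    def save(self, record: OperationRecord) -> None:
+        self._records[record.token] = record
+
+    def load(self, token: str) -> OperationRecord | None:
+        record = self._records.get(token)
+        if record is not None and time.time() > record.expires_at:
+            # Past its keep-alive window: the result is no longer available
+            del self._records[token]
+            return None
+        return record
+```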
+
 ### Low-Level Server
 
 For more control, you can use the low-level server implementation directly. This gives you full access to the protocol and allows you to customize every aspect of your server, including lifecycle management through the lifespan API:
diff --git a/examples/clients/async-reconnect-client/README.md b/examples/clients/async-reconnect-client/README.md
new file mode 100644
index 000000000..9d3106c41
--- /dev/null
+++ b/examples/clients/async-reconnect-client/README.md
@@ -0,0 +1,79 @@
+# Async Reconnect Client Example
+
+A demonstration of how to use the MCP Python SDK to call async tools and handle operation tokens for resuming long-running operations.
+
+## Features
+
+- Async tool invocation with operation tokens
+- Operation status polling and result retrieval
+- Support for resuming operations with existing tokens
+
+## Installation
+
+```bash
+cd examples/clients/async-reconnect-client
+uv sync --reinstall
+```
+
+## Usage
+
+### 1. Start an MCP server with async tools
+
+```bash
+# Example with the simple-tool-async server
+cd examples/servers/simple-tool-async
+uv run mcp-simple-tool-async --transport streamable-http --port 8000
+```
+
+### 2. Run the client
+
+```bash
+# Connect to the default endpoint
+uv run mcp-async-reconnect-client
+
+# Connect to a custom endpoint
+uv run mcp-async-reconnect-client --endpoint http://localhost:3001/mcp
+
+# Resume with an existing operation token
+uv run mcp-async-reconnect-client --token your-operation-token-here
+```
+
+## Example
+
+The client calls the `fetch_website` async tool and demonstrates:
+
+1. Starting an async operation and receiving an operation token
+2. Polling the operation status until completion
+3. Retrieving the final result when the operation completes
+
+```bash
+$ uv run mcp-async-reconnect-client
+Calling async tool...
+Operation started with token: abc123...
+Status: submitted
+Status: working
+Status: completed
+Result: ...
+```
+
+The client can be terminated during polling and resumed with the returned token, demonstrating how reconnection is supported:
+
+```bash
+$ uv run mcp-async-reconnect-client
+Calling async tool...
+Operation started with token: abc123...
+Status: working
+^C
+Aborted!
+$ uv run mcp-async-reconnect-client --token=abc123...
+Calling async tool...
+Status: completed
+Result: ...
+```
+
+## Configuration
+
+- `--endpoint` - MCP server endpoint (default: `http://127.0.0.1:8000/mcp`)
+- `--token` - Operation token to resume with (optional)
+
+This example showcases the async tool capabilities introduced in MCP protocol version "next", allowing for long-running operations that can be resumed even if the client disconnects.
diff --git a/examples/clients/async-reconnect-client/mcp_async_reconnect_client/__init__.py b/examples/clients/async-reconnect-client/mcp_async_reconnect_client/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/examples/clients/async-reconnect-client/mcp_async_reconnect_client/client.py b/examples/clients/async-reconnect-client/mcp_async_reconnect_client/client.py
new file mode 100644
index 000000000..6fa3af9ce
--- /dev/null
+++ b/examples/clients/async-reconnect-client/mcp_async_reconnect_client/client.py
@@ -0,0 +1,48 @@
+import anyio
+import click
+from mcp import ClientSession, types
+from mcp.client.streamable_http import streamablehttp_client
+
+
+async def call_async_tool(session: ClientSession, token: str | None):
+    """Demonstrate calling an async tool."""
+    print("Calling async tool...")
+
+    if not token:
+        result = await session.call_tool("fetch_website", arguments={"url": "https://modelcontextprotocol.io"})
+        if result.isError:
+            raise RuntimeError(f"Error calling tool: {result}")
+        assert result.operation
+        token = result.operation.token
+        print(f"Operation started with token: {token}")
+
+    # Poll for completion
+    while True:
+        status = await session.get_operation_status(token)
+        print(f"Status: {status.status}")
+
+        if status.status == "completed":
+            final_result = await session.get_operation_result(token)
+            for content in final_result.result.content:
+                if isinstance(content, types.TextContent):
+                    print(f"Result: {content.text}")
+            break
+        elif status.status == "failed":
+            print(f"Operation failed: {status.error}")
+            break
+
+        await anyio.sleep(0.5)
+
+
+async def run_session(endpoint: str, token: str | None):
+    async with streamablehttp_client(endpoint) as (read, write, _):
+        async with ClientSession(read, write, protocol_version="next") as session:
+            await session.initialize()
+            await call_async_tool(session, token)
+
+
+@click.command()
+@click.option("--endpoint", default="http://127.0.0.1:8000/mcp", help="Endpoint to connect to")
+@click.option("--token", default=None, help="Operation token to resume with")
+def main(endpoint: str, token: str | None):
+    anyio.run(run_session, endpoint, token)
diff --git a/examples/clients/async-reconnect-client/pyproject.toml b/examples/clients/async-reconnect-client/pyproject.toml
new file mode 100644
index 000000000..53c66ea28
--- /dev/null
+++ b/examples/clients/async-reconnect-client/pyproject.toml
@@ -0,0 +1,49 @@
+[project]
+name = "mcp-async-reconnect-client"
+version = "0.1.0"
+description = "A client for the MCP simple-tool-async server that supports reconnection"
+readme = "README.md"
+requires-python = ">=3.10"
+authors = [{ name = "Anthropic" }]
+keywords = ["mcp", "client", "async"]
+license = { text = "MIT" }
+classifiers = [
+    "Development Status :: 4 - Beta",
+    "Intended Audience :: Developers",
+    "License :: OSI Approved :: MIT License",
+    "Programming Language :: Python :: 3",
+    "Programming Language :: Python :: 3.10",
+]
+dependencies = ["click>=8.2.0", "mcp>=1.0.0"]
+
+[project.scripts]
+mcp-async-reconnect-client = "mcp_async_reconnect_client.client:main"
+
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.hatch.build.targets.wheel]
+packages = ["mcp_async_reconnect_client"]
+
+[tool.pyright]
+include = ["mcp_async_reconnect_client"]
+venvPath = "."
+venv = ".venv" + +[tool.ruff.lint] +select = ["E", "F", "I"] +ignore = [] + +[tool.ruff] +line-length = 120 +target-version = "py310" + +[tool.uv] +dev-dependencies = ["pyright>=1.1.379", "pytest>=8.3.3", "ruff>=0.6.9"] + +[tool.uv.sources] +mcp = { path = "../../../" } + +[[tool.uv.index]] +url = "https://pypi.org/simple" diff --git a/examples/clients/async-reconnect-client/uv.lock b/examples/clients/async-reconnect-client/uv.lock new file mode 100644 index 000000000..21173abdc --- /dev/null +++ b/examples/clients/async-reconnect-client/uv.lock @@ -0,0 +1,761 @@ +version = 1 +revision = 2 +requires-python = ">=3.10" + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, +] + +[[package]] +name = "attrs" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, +] + +[[package]] +name = "certifi" +version = "2025.8.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = 
"sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, +] + +[[package]] +name = "click" +version = "8.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = 
"2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "httpx-sse" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6e/fa/66bd985dd0b7c109a3bcb89272ee0bfb7e2b4d06309ad7b38ff866734b2a/httpx_sse-0.4.1.tar.gz", hash = "sha256:8f44d34414bc7b21bf3602713005c5df4917884f76072479b21f68befa4ea26e", size = 12998, upload-time = "2025-06-24T13:21:05.71Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/0a/6269e3473b09aed2dab8aa1a600c70f31f00ae1349bee30658f7e358a159/httpx_sse-0.4.1-py3-none-any.whl", hash = "sha256:cba42174344c3a5b06f255ce65b350880f962d99ead85e776f23c6618a377a37", size = 8054, upload-time = "2025-06-24T13:21:04.772Z" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, +] + +[[package]] +name = "jsonschema" +version = "4.25.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/74/69/f7185de793a29082a9f3c7728268ffb31cb5095131a9c139a74078e27336/jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85", size = 357342, upload-time = "2025-08-18T17:03:50.038Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/9c/8c95d856233c1f82500c2450b8c68576b4cf1c871db3afac5c34ff84e6fd/jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63", size = 90040, upload-time = "2025-08-18T17:03:48.373Z" }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2025.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, +] + +[[package]] +name = "mcp" +source = { directory = "../../../" } +dependencies = [ + { name = "anyio" }, + { name = "httpx" }, + { name = "httpx-sse" }, + { name = "jsonschema" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "python-multipart" }, + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "sse-starlette" }, + { name = "starlette" }, + { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, +] + +[package.metadata] +requires-dist = [ + { name = "anyio", specifier = ">=4.5" }, + { name = "httpx", specifier = ">=0.27.1" }, + { name = "httpx-sse", specifier = ">=0.4" }, + { name = "jsonschema", specifier = ">=4.20.0" }, + { name = "pydantic", specifier = ">=2.11.0,<3.0.0" }, + { name = "pydantic-settings", specifier = ">=2.5.2" }, + { name = "python-dotenv", marker = "extra == 'cli'", specifier = ">=1.0.0" }, + { name = "python-multipart", specifier = ">=0.0.9" }, + { name = "pywin32", marker = "sys_platform == 'win32'", specifier = ">=310" }, + { name = "rich", marker = "extra == 'rich'", specifier = ">=13.9.4" }, + { name = "sse-starlette", specifier = ">=1.6.1" }, + { name = "starlette", specifier = ">=0.27" }, + { name = "typer", marker = "extra == 'cli'", specifier = ">=0.16.0" }, + { name = "uvicorn", marker = "sys_platform != 'emscripten'", specifier = ">=0.31.1" }, + { name = "websockets", marker = "extra == 'ws'", specifier = ">=15.0.1" }, +] +provides-extras = ["cli", "rich", "ws"] + +[package.metadata.requires-dev] +dev = [ + { name = "dirty-equals", specifier = ">=0.9.0" }, + { name = "inline-snapshot", specifier = ">=0.23.0" }, + { name = "pyright", specifier = ">=1.1.400" }, + { name = "pytest", specifier = ">=8.3.4" }, + { name = "pytest-examples", specifier = ">=0.0.14" }, + { name = "pytest-flakefinder", specifier = ">=1.1.0" }, + { name = "pytest-pretty", specifier = ">=1.2.0" }, + { name = "pytest-xdist", specifier = ">=3.6.1" }, + { name = "ruff", specifier = ">=0.8.5" }, + { name = "trio", specifier = ">=0.26.2" }, +] +docs = [ + { name = "mkdocs", specifier = ">=1.6.1" }, + { name = "mkdocs-glightbox", specifier = ">=0.4.0" }, + { name = 
"mkdocs-material", extras = ["imaging"], specifier = ">=9.5.45" }, + { name = "mkdocstrings-python", specifier = ">=1.12.2" }, +] + +[[package]] +name = "mcp-async-reconnect-client" +version = "0.1.0" +source = { editable = "." } +dependencies = [ + { name = "click" }, + { name = "mcp" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pyright" }, + { name = "pytest" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "click", specifier = ">=8.2.0" }, + { name = "mcp", directory = "../../../" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "pyright", specifier = ">=1.1.379" }, + { name = "pytest", specifier = ">=8.3.3" }, + { name = "ruff", specifier = ">=0.6.9" }, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pydantic" +version = "2.11.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = 
"sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.33.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, + { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, + { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, + { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, + { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, + { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, + { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, + { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, + { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", 
size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, + { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, + { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, + { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, + { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, + { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, + { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, + { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, + { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, + { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, + { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, + { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, + { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, + { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, + { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, + { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, + { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, + { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, + { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, + { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, + { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, + { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, + { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, + { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, + { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, + { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, + { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, + { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, + { url = 
"https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, + { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, + { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, + { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, + { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, + { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, + { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, + { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, + { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, + { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, + { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, + { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, + { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, + { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, + { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, + { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, + { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, + { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, + { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, + { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, + { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, + { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, + { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, + { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, + { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, + { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, + { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, + { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, + { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, + { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, + { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, + { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, +] + +[[package]] +name = "pydantic-settings" +version = "2.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/20/c5/dbbc27b814c71676593d1c3f718e6cd7d4f00652cefa24b75f7aa3efb25e/pydantic_settings-2.11.0.tar.gz", hash = "sha256:d0e87a1c7d33593beb7194adb8470fc426e95ba02af83a0f23474a04c9a08180", size = 188394, upload-time = "2025-09-24T14:19:11.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/d6/887a1ff844e64aa823fb4905978d882a633cfe295c32eacad582b78a7d8b/pydantic_settings-2.11.0-py3-none-any.whl", hash = "sha256:fe2cea3413b9530d10f3a5875adffb17ada5c1e1bab0b2885546d7310415207c", size = 48608, upload-time = "2025-09-24T14:19:10.015Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pyright" +version = "1.1.405" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nodeenv" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fb/6c/ba4bbee22e76af700ea593a1d8701e3225080956753bee9750dcc25e2649/pyright-1.1.405.tar.gz", hash = "sha256:5c2a30e1037af27eb463a1cc0b9f6d65fec48478ccf092c1ac28385a15c55763", size = 4068319, upload-time = "2025-09-04T03:37:06.776Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/1a/524f832e1ff1962a22a1accc775ca7b143ba2e9f5924bb6749dce566784a/pyright-1.1.405-py3-none-any.whl", hash = "sha256:a2cb13700b5508ce8e5d4546034cb7ea4aedb60215c6c33f56cec7f53996035a", size = 5905038, upload-time = "2025-09-04T03:37:04.913Z" }, +] + +[[package]] +name = "pytest" +version = "8.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies 
= [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, +] + +[[package]] +name = "python-multipart" +version = "0.0.20" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload-time = "2024-12-16T19:45:46.972Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, +] + +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/40/44efbb0dfbd33aca6a6483191dae0716070ed99e2ecb0c53683f400a0b4f/pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3", size = 8760432, upload-time = "2025-07-14T20:13:05.9Z" }, + { url = "https://files.pythonhosted.org/packages/5e/bf/360243b1e953bd254a82f12653974be395ba880e7ec23e3731d9f73921cc/pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b", size = 9590103, upload-time = "2025-07-14T20:13:07.698Z" }, + { url = "https://files.pythonhosted.org/packages/57/38/d290720e6f138086fb3d5ffe0b6caa019a791dd57866940c82e4eeaf2012/pywin32-311-cp310-cp310-win_arm64.whl", hash = "sha256:0502d1facf1fed4839a9a51ccbcc63d952cf318f78ffc00a7e78528ac27d7a2b", size = 8778557, upload-time = "2025-07-14T20:13:11.11Z" }, + { url = "https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = 
"sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031, upload-time = "2025-07-14T20:13:13.266Z" }, + { url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308, upload-time = "2025-07-14T20:13:15.147Z" }, + { url = "https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930, upload-time = "2025-07-14T20:13:16.945Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, +] + +[[package]] +name = "referencing" +version = "0.36.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + 
{ name = "attrs" }, + { name = "rpds-py" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" }, +] + +[[package]] +name = "rpds-py" +version = "0.27.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e9/dd/2c0cbe774744272b0ae725f44032c77bdcab6e8bcf544bffa3b6e70c8dba/rpds_py-0.27.1.tar.gz", hash = "sha256:26a1c73171d10b7acccbded82bf6a586ab8203601e565badc74bbbf8bc5a10f8", size = 27479, upload-time = "2025-08-27T12:16:36.024Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/ed/3aef893e2dd30e77e35d20d4ddb45ca459db59cead748cad9796ad479411/rpds_py-0.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:68afeec26d42ab3b47e541b272166a0b4400313946871cba3ed3a4fc0cab1cef", size = 371606, upload-time = "2025-08-27T12:12:25.189Z" }, + { url = "https://files.pythonhosted.org/packages/6d/82/9818b443e5d3eb4c83c3994561387f116aae9833b35c484474769c4a8faf/rpds_py-0.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74e5b2f7bb6fa38b1b10546d27acbacf2a022a8b5543efb06cfebc72a59c85be", size = 353452, upload-time = "2025-08-27T12:12:27.433Z" }, + { url = "https://files.pythonhosted.org/packages/99/c7/d2a110ffaaa397fc6793a83c7bd3545d9ab22658b7cdff05a24a4535cc45/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9024de74731df54546fab0bfbcdb49fae19159ecaecfc8f37c18d2c7e2c0bd61", size = 381519, upload-time = "2025-08-27T12:12:28.719Z" }, + { url = "https://files.pythonhosted.org/packages/5a/bc/e89581d1f9d1be7d0247eaef602566869fdc0d084008ba139e27e775366c/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:31d3ebadefcd73b73928ed0b2fd696f7fefda8629229f81929ac9c1854d0cffb", size = 394424, upload-time = "2025-08-27T12:12:30.207Z" }, + { url = "https://files.pythonhosted.org/packages/ac/2e/36a6861f797530e74bb6ed53495f8741f1ef95939eed01d761e73d559067/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2e7f8f169d775dd9092a1743768d771f1d1300453ddfe6325ae3ab5332b4657", size = 523467, upload-time = "2025-08-27T12:12:31.808Z" }, + { url = "https://files.pythonhosted.org/packages/c4/59/c1bc2be32564fa499f988f0a5c6505c2f4746ef96e58e4d7de5cf923d77e/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d905d16f77eb6ab2e324e09bfa277b4c8e5e6b8a78a3e7ff8f3cdf773b4c013", size = 402660, upload-time = "2025-08-27T12:12:33.444Z" }, + { url = "https://files.pythonhosted.org/packages/0a/ec/ef8bf895f0628dd0a59e54d81caed6891663cb9c54a0f4bb7da918cb88cf/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50c946f048209e6362e22576baea09193809f87687a95a8db24e5fbdb307b93a", size = 384062, upload-time = "2025-08-27T12:12:34.857Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/f7/f47ff154be8d9a5e691c083a920bba89cef88d5247c241c10b9898f595a1/rpds_py-0.27.1-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:3deab27804d65cd8289eb814c2c0e807c4b9d9916c9225e363cb0cf875eb67c1", size = 401289, upload-time = "2025-08-27T12:12:36.085Z" }, + { url = "https://files.pythonhosted.org/packages/3b/d9/ca410363efd0615814ae579f6829cafb39225cd63e5ea5ed1404cb345293/rpds_py-0.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8b61097f7488de4be8244c89915da8ed212832ccf1e7c7753a25a394bf9b1f10", size = 417718, upload-time = "2025-08-27T12:12:37.401Z" }, + { url = "https://files.pythonhosted.org/packages/e3/a0/8cb5c2ff38340f221cc067cc093d1270e10658ba4e8d263df923daa18e86/rpds_py-0.27.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8a3f29aba6e2d7d90528d3c792555a93497fe6538aa65eb675b44505be747808", size = 558333, upload-time = "2025-08-27T12:12:38.672Z" }, + { url = "https://files.pythonhosted.org/packages/6f/8c/1b0de79177c5d5103843774ce12b84caa7164dfc6cd66378768d37db11bf/rpds_py-0.27.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dd6cd0485b7d347304067153a6dc1d73f7d4fd995a396ef32a24d24b8ac63ac8", size = 589127, upload-time = "2025-08-27T12:12:41.48Z" }, + { url = "https://files.pythonhosted.org/packages/c8/5e/26abb098d5e01266b0f3a2488d299d19ccc26849735d9d2b95c39397e945/rpds_py-0.27.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f4461bf931108c9fa226ffb0e257c1b18dc2d44cd72b125bec50ee0ab1248a9", size = 554899, upload-time = "2025-08-27T12:12:42.925Z" }, + { url = "https://files.pythonhosted.org/packages/de/41/905cc90ced13550db017f8f20c6d8e8470066c5738ba480d7ba63e3d136b/rpds_py-0.27.1-cp310-cp310-win32.whl", hash = "sha256:ee5422d7fb21f6a00c1901bf6559c49fee13a5159d0288320737bbf6585bd3e4", size = 217450, upload-time = "2025-08-27T12:12:44.813Z" }, + { url = "https://files.pythonhosted.org/packages/75/3d/6bef47b0e253616ccdf67c283e25f2d16e18ccddd38f92af81d5a3420206/rpds_py-0.27.1-cp310-cp310-win_amd64.whl", hash = "sha256:3e039aabf6d5f83c745d5f9a0a381d031e9ed871967c0a5c38d201aca41f3ba1", size = 228447, upload-time = "2025-08-27T12:12:46.204Z" }, + { url = "https://files.pythonhosted.org/packages/b5/c1/7907329fbef97cbd49db6f7303893bd1dd5a4a3eae415839ffdfb0762cae/rpds_py-0.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:be898f271f851f68b318872ce6ebebbc62f303b654e43bf72683dbdc25b7c881", size = 371063, upload-time = "2025-08-27T12:12:47.856Z" }, + { url = "https://files.pythonhosted.org/packages/11/94/2aab4bc86228bcf7c48760990273653a4900de89c7537ffe1b0d6097ed39/rpds_py-0.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62ac3d4e3e07b58ee0ddecd71d6ce3b1637de2d373501412df395a0ec5f9beb5", size = 353210, upload-time = "2025-08-27T12:12:49.187Z" }, + { url = "https://files.pythonhosted.org/packages/3a/57/f5eb3ecf434342f4f1a46009530e93fd201a0b5b83379034ebdb1d7c1a58/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4708c5c0ceb2d034f9991623631d3d23cb16e65c83736ea020cdbe28d57c0a0e", size = 381636, upload-time = "2025-08-27T12:12:50.492Z" }, + { url = "https://files.pythonhosted.org/packages/ae/f4/ef95c5945e2ceb5119571b184dd5a1cc4b8541bbdf67461998cfeac9cb1e/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:abfa1171a9952d2e0002aba2ad3780820b00cc3d9c98c6630f2e93271501f66c", size = 394341, upload-time = "2025-08-27T12:12:52.024Z" }, + { url = 
"https://files.pythonhosted.org/packages/5a/7e/4bd610754bf492d398b61725eb9598ddd5eb86b07d7d9483dbcd810e20bc/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b507d19f817ebaca79574b16eb2ae412e5c0835542c93fe9983f1e432aca195", size = 523428, upload-time = "2025-08-27T12:12:53.779Z" }, + { url = "https://files.pythonhosted.org/packages/9f/e5/059b9f65a8c9149361a8b75094864ab83b94718344db511fd6117936ed2a/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168b025f8fd8d8d10957405f3fdcef3dc20f5982d398f90851f4abc58c566c52", size = 402923, upload-time = "2025-08-27T12:12:55.15Z" }, + { url = "https://files.pythonhosted.org/packages/f5/48/64cabb7daced2968dd08e8a1b7988bf358d7bd5bcd5dc89a652f4668543c/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb56c6210ef77caa58e16e8c17d35c63fe3f5b60fd9ba9d424470c3400bcf9ed", size = 384094, upload-time = "2025-08-27T12:12:57.194Z" }, + { url = "https://files.pythonhosted.org/packages/ae/e1/dc9094d6ff566bff87add8a510c89b9e158ad2ecd97ee26e677da29a9e1b/rpds_py-0.27.1-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:d252f2d8ca0195faa707f8eb9368955760880b2b42a8ee16d382bf5dd807f89a", size = 401093, upload-time = "2025-08-27T12:12:58.985Z" }, + { url = "https://files.pythonhosted.org/packages/37/8e/ac8577e3ecdd5593e283d46907d7011618994e1d7ab992711ae0f78b9937/rpds_py-0.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6e5e54da1e74b91dbc7996b56640f79b195d5925c2b78efaa8c5d53e1d88edde", size = 417969, upload-time = "2025-08-27T12:13:00.367Z" }, + { url = "https://files.pythonhosted.org/packages/66/6d/87507430a8f74a93556fe55c6485ba9c259949a853ce407b1e23fea5ba31/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ffce0481cc6e95e5b3f0a47ee17ffbd234399e6d532f394c8dce320c3b089c21", size = 558302, upload-time = "2025-08-27T12:13:01.737Z" }, + { url = "https://files.pythonhosted.org/packages/3a/bb/1db4781ce1dda3eecc735e3152659a27b90a02ca62bfeea17aee45cc0fbc/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a205fdfe55c90c2cd8e540ca9ceba65cbe6629b443bc05db1f590a3db8189ff9", size = 589259, upload-time = "2025-08-27T12:13:03.127Z" }, + { url = "https://files.pythonhosted.org/packages/7b/0e/ae1c8943d11a814d01b482e1f8da903f88047a962dff9bbdadf3bd6e6fd1/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:689fb5200a749db0415b092972e8eba85847c23885c8543a8b0f5c009b1a5948", size = 554983, upload-time = "2025-08-27T12:13:04.516Z" }, + { url = "https://files.pythonhosted.org/packages/b2/d5/0b2a55415931db4f112bdab072443ff76131b5ac4f4dc98d10d2d357eb03/rpds_py-0.27.1-cp311-cp311-win32.whl", hash = "sha256:3182af66048c00a075010bc7f4860f33913528a4b6fc09094a6e7598e462fe39", size = 217154, upload-time = "2025-08-27T12:13:06.278Z" }, + { url = "https://files.pythonhosted.org/packages/24/75/3b7ffe0d50dc86a6a964af0d1cc3a4a2cdf437cb7b099a4747bbb96d1819/rpds_py-0.27.1-cp311-cp311-win_amd64.whl", hash = "sha256:b4938466c6b257b2f5c4ff98acd8128ec36b5059e5c8f8372d79316b1c36bb15", size = 228627, upload-time = "2025-08-27T12:13:07.625Z" }, + { url = "https://files.pythonhosted.org/packages/8d/3f/4fd04c32abc02c710f09a72a30c9a55ea3cc154ef8099078fd50a0596f8e/rpds_py-0.27.1-cp311-cp311-win_arm64.whl", hash = "sha256:2f57af9b4d0793e53266ee4325535a31ba48e2f875da81a9177c9926dfa60746", size = 220998, upload-time = "2025-08-27T12:13:08.972Z" }, + { url = 
"https://files.pythonhosted.org/packages/bd/fe/38de28dee5df58b8198c743fe2bea0c785c6d40941b9950bac4cdb71a014/rpds_py-0.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ae2775c1973e3c30316892737b91f9283f9908e3cc7625b9331271eaaed7dc90", size = 361887, upload-time = "2025-08-27T12:13:10.233Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9a/4b6c7eedc7dd90986bf0fab6ea2a091ec11c01b15f8ba0a14d3f80450468/rpds_py-0.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2643400120f55c8a96f7c9d858f7be0c88d383cd4653ae2cf0d0c88f668073e5", size = 345795, upload-time = "2025-08-27T12:13:11.65Z" }, + { url = "https://files.pythonhosted.org/packages/6f/0e/e650e1b81922847a09cca820237b0edee69416a01268b7754d506ade11ad/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16323f674c089b0360674a4abd28d5042947d54ba620f72514d69be4ff64845e", size = 385121, upload-time = "2025-08-27T12:13:13.008Z" }, + { url = "https://files.pythonhosted.org/packages/1b/ea/b306067a712988e2bff00dcc7c8f31d26c29b6d5931b461aa4b60a013e33/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a1f4814b65eacac94a00fc9a526e3fdafd78e439469644032032d0d63de4881", size = 398976, upload-time = "2025-08-27T12:13:14.368Z" }, + { url = "https://files.pythonhosted.org/packages/2c/0a/26dc43c8840cb8fe239fe12dbc8d8de40f2365e838f3d395835dde72f0e5/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ba32c16b064267b22f1850a34051121d423b6f7338a12b9459550eb2096e7ec", size = 525953, upload-time = "2025-08-27T12:13:15.774Z" }, + { url = "https://files.pythonhosted.org/packages/22/14/c85e8127b573aaf3a0cbd7fbb8c9c99e735a4a02180c84da2a463b766e9e/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5c20f33fd10485b80f65e800bbe5f6785af510b9f4056c5a3c612ebc83ba6cb", size = 407915, upload-time = "2025-08-27T12:13:17.379Z" }, + { url = "https://files.pythonhosted.org/packages/ed/7b/8f4fee9ba1fb5ec856eb22d725a4efa3deb47f769597c809e03578b0f9d9/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:466bfe65bd932da36ff279ddd92de56b042f2266d752719beb97b08526268ec5", size = 386883, upload-time = "2025-08-27T12:13:18.704Z" }, + { url = "https://files.pythonhosted.org/packages/86/47/28fa6d60f8b74fcdceba81b272f8d9836ac0340570f68f5df6b41838547b/rpds_py-0.27.1-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:41e532bbdcb57c92ba3be62c42e9f096431b4cf478da9bc3bc6ce5c38ab7ba7a", size = 405699, upload-time = "2025-08-27T12:13:20.089Z" }, + { url = "https://files.pythonhosted.org/packages/d0/fd/c5987b5e054548df56953a21fe2ebed51fc1ec7c8f24fd41c067b68c4a0a/rpds_py-0.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f149826d742b406579466283769a8ea448eed82a789af0ed17b0cd5770433444", size = 423713, upload-time = "2025-08-27T12:13:21.436Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ba/3c4978b54a73ed19a7d74531be37a8bcc542d917c770e14d372b8daea186/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:80c60cfb5310677bd67cb1e85a1e8eb52e12529545441b43e6f14d90b878775a", size = 562324, upload-time = "2025-08-27T12:13:22.789Z" }, + { url = "https://files.pythonhosted.org/packages/b5/6c/6943a91768fec16db09a42b08644b960cff540c66aab89b74be6d4a144ba/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7ee6521b9baf06085f62ba9c7a3e5becffbc32480d2f1b351559c001c38ce4c1", size = 593646, upload-time = "2025-08-27T12:13:24.122Z" }, 
+ { url = "https://files.pythonhosted.org/packages/11/73/9d7a8f4be5f4396f011a6bb7a19fe26303a0dac9064462f5651ced2f572f/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a512c8263249a9d68cac08b05dd59d2b3f2061d99b322813cbcc14c3c7421998", size = 558137, upload-time = "2025-08-27T12:13:25.557Z" }, + { url = "https://files.pythonhosted.org/packages/6e/96/6772cbfa0e2485bcceef8071de7821f81aeac8bb45fbfd5542a3e8108165/rpds_py-0.27.1-cp312-cp312-win32.whl", hash = "sha256:819064fa048ba01b6dadc5116f3ac48610435ac9a0058bbde98e569f9e785c39", size = 221343, upload-time = "2025-08-27T12:13:26.967Z" }, + { url = "https://files.pythonhosted.org/packages/67/b6/c82f0faa9af1c6a64669f73a17ee0eeef25aff30bb9a1c318509efe45d84/rpds_py-0.27.1-cp312-cp312-win_amd64.whl", hash = "sha256:d9199717881f13c32c4046a15f024971a3b78ad4ea029e8da6b86e5aa9cf4594", size = 232497, upload-time = "2025-08-27T12:13:28.326Z" }, + { url = "https://files.pythonhosted.org/packages/e1/96/2817b44bd2ed11aebacc9251da03689d56109b9aba5e311297b6902136e2/rpds_py-0.27.1-cp312-cp312-win_arm64.whl", hash = "sha256:33aa65b97826a0e885ef6e278fbd934e98cdcfed80b63946025f01e2f5b29502", size = 222790, upload-time = "2025-08-27T12:13:29.71Z" }, + { url = "https://files.pythonhosted.org/packages/cc/77/610aeee8d41e39080c7e14afa5387138e3c9fa9756ab893d09d99e7d8e98/rpds_py-0.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e4b9fcfbc021633863a37e92571d6f91851fa656f0180246e84cbd8b3f6b329b", size = 361741, upload-time = "2025-08-27T12:13:31.039Z" }, + { url = "https://files.pythonhosted.org/packages/3a/fc/c43765f201c6a1c60be2043cbdb664013def52460a4c7adace89d6682bf4/rpds_py-0.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1441811a96eadca93c517d08df75de45e5ffe68aa3089924f963c782c4b898cf", size = 345574, upload-time = "2025-08-27T12:13:32.902Z" }, + { url = "https://files.pythonhosted.org/packages/20/42/ee2b2ca114294cd9847d0ef9c26d2b0851b2e7e00bf14cc4c0b581df0fc3/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55266dafa22e672f5a4f65019015f90336ed31c6383bd53f5e7826d21a0e0b83", size = 385051, upload-time = "2025-08-27T12:13:34.228Z" }, + { url = "https://files.pythonhosted.org/packages/fd/e8/1e430fe311e4799e02e2d1af7c765f024e95e17d651612425b226705f910/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d78827d7ac08627ea2c8e02c9e5b41180ea5ea1f747e9db0915e3adf36b62dcf", size = 398395, upload-time = "2025-08-27T12:13:36.132Z" }, + { url = "https://files.pythonhosted.org/packages/82/95/9dc227d441ff2670651c27a739acb2535ccaf8b351a88d78c088965e5996/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae92443798a40a92dc5f0b01d8a7c93adde0c4dc965310a29ae7c64d72b9fad2", size = 524334, upload-time = "2025-08-27T12:13:37.562Z" }, + { url = "https://files.pythonhosted.org/packages/87/01/a670c232f401d9ad461d9a332aa4080cd3cb1d1df18213dbd0d2a6a7ab51/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c46c9dd2403b66a2a3b9720ec4b74d4ab49d4fabf9f03dfdce2d42af913fe8d0", size = 407691, upload-time = "2025-08-27T12:13:38.94Z" }, + { url = "https://files.pythonhosted.org/packages/03/36/0a14aebbaa26fe7fab4780c76f2239e76cc95a0090bdb25e31d95c492fcd/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2efe4eb1d01b7f5f1939f4ef30ecea6c6b3521eec451fb93191bf84b2a522418", size = 386868, upload-time = "2025-08-27T12:13:40.192Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/03/8c897fb8b5347ff6c1cc31239b9611c5bf79d78c984430887a353e1409a1/rpds_py-0.27.1-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:15d3b4d83582d10c601f481eca29c3f138d44c92187d197aff663a269197c02d", size = 405469, upload-time = "2025-08-27T12:13:41.496Z" }, + { url = "https://files.pythonhosted.org/packages/da/07/88c60edc2df74850d496d78a1fdcdc7b54360a7f610a4d50008309d41b94/rpds_py-0.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4ed2e16abbc982a169d30d1a420274a709949e2cbdef119fe2ec9d870b42f274", size = 422125, upload-time = "2025-08-27T12:13:42.802Z" }, + { url = "https://files.pythonhosted.org/packages/6b/86/5f4c707603e41b05f191a749984f390dabcbc467cf833769b47bf14ba04f/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a75f305c9b013289121ec0f1181931975df78738cdf650093e6b86d74aa7d8dd", size = 562341, upload-time = "2025-08-27T12:13:44.472Z" }, + { url = "https://files.pythonhosted.org/packages/b2/92/3c0cb2492094e3cd9baf9e49bbb7befeceb584ea0c1a8b5939dca4da12e5/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:67ce7620704745881a3d4b0ada80ab4d99df390838839921f99e63c474f82cf2", size = 592511, upload-time = "2025-08-27T12:13:45.898Z" }, + { url = "https://files.pythonhosted.org/packages/10/bb/82e64fbb0047c46a168faa28d0d45a7851cd0582f850b966811d30f67ad8/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d992ac10eb86d9b6f369647b6a3f412fc0075cfd5d799530e84d335e440a002", size = 557736, upload-time = "2025-08-27T12:13:47.408Z" }, + { url = "https://files.pythonhosted.org/packages/00/95/3c863973d409210da7fb41958172c6b7dbe7fc34e04d3cc1f10bb85e979f/rpds_py-0.27.1-cp313-cp313-win32.whl", hash = "sha256:4f75e4bd8ab8db624e02c8e2fc4063021b58becdbe6df793a8111d9343aec1e3", size = 221462, upload-time = "2025-08-27T12:13:48.742Z" }, + { url = "https://files.pythonhosted.org/packages/ce/2c/5867b14a81dc217b56d95a9f2a40fdbc56a1ab0181b80132beeecbd4b2d6/rpds_py-0.27.1-cp313-cp313-win_amd64.whl", hash = "sha256:f9025faafc62ed0b75a53e541895ca272815bec18abe2249ff6501c8f2e12b83", size = 232034, upload-time = "2025-08-27T12:13:50.11Z" }, + { url = "https://files.pythonhosted.org/packages/c7/78/3958f3f018c01923823f1e47f1cc338e398814b92d83cd278364446fac66/rpds_py-0.27.1-cp313-cp313-win_arm64.whl", hash = "sha256:ed10dc32829e7d222b7d3b93136d25a406ba9788f6a7ebf6809092da1f4d279d", size = 222392, upload-time = "2025-08-27T12:13:52.587Z" }, + { url = "https://files.pythonhosted.org/packages/01/76/1cdf1f91aed5c3a7bf2eba1f1c4e4d6f57832d73003919a20118870ea659/rpds_py-0.27.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:92022bbbad0d4426e616815b16bc4127f83c9a74940e1ccf3cfe0b387aba0228", size = 358355, upload-time = "2025-08-27T12:13:54.012Z" }, + { url = "https://files.pythonhosted.org/packages/c3/6f/bf142541229374287604caf3bb2a4ae17f0a580798fd72d3b009b532db4e/rpds_py-0.27.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:47162fdab9407ec3f160805ac3e154df042e577dd53341745fc7fb3f625e6d92", size = 342138, upload-time = "2025-08-27T12:13:55.791Z" }, + { url = "https://files.pythonhosted.org/packages/1a/77/355b1c041d6be40886c44ff5e798b4e2769e497b790f0f7fd1e78d17e9a8/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb89bec23fddc489e5d78b550a7b773557c9ab58b7946154a10a6f7a214a48b2", size = 380247, upload-time = "2025-08-27T12:13:57.683Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/a4/d9cef5c3946ea271ce2243c51481971cd6e34f21925af2783dd17b26e815/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e48af21883ded2b3e9eb48cb7880ad8598b31ab752ff3be6457001d78f416723", size = 390699, upload-time = "2025-08-27T12:13:59.137Z" }, + { url = "https://files.pythonhosted.org/packages/3a/06/005106a7b8c6c1a7e91b73169e49870f4af5256119d34a361ae5240a0c1d/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f5b7bd8e219ed50299e58551a410b64daafb5017d54bbe822e003856f06a802", size = 521852, upload-time = "2025-08-27T12:14:00.583Z" }, + { url = "https://files.pythonhosted.org/packages/e5/3e/50fb1dac0948e17a02eb05c24510a8fe12d5ce8561c6b7b7d1339ab7ab9c/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08f1e20bccf73b08d12d804d6e1c22ca5530e71659e6673bce31a6bb71c1e73f", size = 402582, upload-time = "2025-08-27T12:14:02.034Z" }, + { url = "https://files.pythonhosted.org/packages/cb/b0/f4e224090dc5b0ec15f31a02d746ab24101dd430847c4d99123798661bfc/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dc5dceeaefcc96dc192e3a80bbe1d6c410c469e97bdd47494a7d930987f18b2", size = 384126, upload-time = "2025-08-27T12:14:03.437Z" }, + { url = "https://files.pythonhosted.org/packages/54/77/ac339d5f82b6afff1df8f0fe0d2145cc827992cb5f8eeb90fc9f31ef7a63/rpds_py-0.27.1-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:d76f9cc8665acdc0c9177043746775aa7babbf479b5520b78ae4002d889f5c21", size = 399486, upload-time = "2025-08-27T12:14:05.443Z" }, + { url = "https://files.pythonhosted.org/packages/d6/29/3e1c255eee6ac358c056a57d6d6869baa00a62fa32eea5ee0632039c50a3/rpds_py-0.27.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:134fae0e36022edad8290a6661edf40c023562964efea0cc0ec7f5d392d2aaef", size = 414832, upload-time = "2025-08-27T12:14:06.902Z" }, + { url = "https://files.pythonhosted.org/packages/3f/db/6d498b844342deb3fa1d030598db93937a9964fcf5cb4da4feb5f17be34b/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb11a4f1b2b63337cfd3b4d110af778a59aae51c81d195768e353d8b52f88081", size = 557249, upload-time = "2025-08-27T12:14:08.37Z" }, + { url = "https://files.pythonhosted.org/packages/60/f3/690dd38e2310b6f68858a331399b4d6dbb9132c3e8ef8b4333b96caf403d/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:13e608ac9f50a0ed4faec0e90ece76ae33b34c0e8656e3dceb9a7db994c692cd", size = 587356, upload-time = "2025-08-27T12:14:10.034Z" }, + { url = "https://files.pythonhosted.org/packages/86/e3/84507781cccd0145f35b1dc32c72675200c5ce8d5b30f813e49424ef68fc/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dd2135527aa40f061350c3f8f89da2644de26cd73e4de458e79606384f4f68e7", size = 555300, upload-time = "2025-08-27T12:14:11.783Z" }, + { url = "https://files.pythonhosted.org/packages/e5/ee/375469849e6b429b3516206b4580a79e9ef3eb12920ddbd4492b56eaacbe/rpds_py-0.27.1-cp313-cp313t-win32.whl", hash = "sha256:3020724ade63fe320a972e2ffd93b5623227e684315adce194941167fee02688", size = 216714, upload-time = "2025-08-27T12:14:13.629Z" }, + { url = "https://files.pythonhosted.org/packages/21/87/3fc94e47c9bd0742660e84706c311a860dcae4374cf4a03c477e23ce605a/rpds_py-0.27.1-cp313-cp313t-win_amd64.whl", hash = "sha256:8ee50c3e41739886606388ba3ab3ee2aae9f35fb23f833091833255a31740797", size = 228943, upload-time = "2025-08-27T12:14:14.937Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/36/b6e6066520a07cf029d385de869729a895917b411e777ab1cde878100a1d/rpds_py-0.27.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:acb9aafccaae278f449d9c713b64a9e68662e7799dbd5859e2c6b3c67b56d334", size = 362472, upload-time = "2025-08-27T12:14:16.333Z" }, + { url = "https://files.pythonhosted.org/packages/af/07/b4646032e0dcec0df9c73a3bd52f63bc6c5f9cda992f06bd0e73fe3fbebd/rpds_py-0.27.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b7fb801aa7f845ddf601c49630deeeccde7ce10065561d92729bfe81bd21fb33", size = 345676, upload-time = "2025-08-27T12:14:17.764Z" }, + { url = "https://files.pythonhosted.org/packages/b0/16/2f1003ee5d0af4bcb13c0cf894957984c32a6751ed7206db2aee7379a55e/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0dd05afb46597b9a2e11c351e5e4283c741237e7f617ffb3252780cca9336a", size = 385313, upload-time = "2025-08-27T12:14:19.829Z" }, + { url = "https://files.pythonhosted.org/packages/05/cd/7eb6dd7b232e7f2654d03fa07f1414d7dfc980e82ba71e40a7c46fd95484/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b6dfb0e058adb12d8b1d1b25f686e94ffa65d9995a5157afe99743bf7369d62b", size = 399080, upload-time = "2025-08-27T12:14:21.531Z" }, + { url = "https://files.pythonhosted.org/packages/20/51/5829afd5000ec1cb60f304711f02572d619040aa3ec033d8226817d1e571/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed090ccd235f6fa8bb5861684567f0a83e04f52dfc2e5c05f2e4b1309fcf85e7", size = 523868, upload-time = "2025-08-27T12:14:23.485Z" }, + { url = "https://files.pythonhosted.org/packages/05/2c/30eebca20d5db95720ab4d2faec1b5e4c1025c473f703738c371241476a2/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf876e79763eecf3e7356f157540d6a093cef395b65514f17a356f62af6cc136", size = 408750, upload-time = "2025-08-27T12:14:24.924Z" }, + { url = "https://files.pythonhosted.org/packages/90/1a/cdb5083f043597c4d4276eae4e4c70c55ab5accec078da8611f24575a367/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12ed005216a51b1d6e2b02a7bd31885fe317e45897de81d86dcce7d74618ffff", size = 387688, upload-time = "2025-08-27T12:14:27.537Z" }, + { url = "https://files.pythonhosted.org/packages/7c/92/cf786a15320e173f945d205ab31585cc43969743bb1a48b6888f7a2b0a2d/rpds_py-0.27.1-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:ee4308f409a40e50593c7e3bb8cbe0b4d4c66d1674a316324f0c2f5383b486f9", size = 407225, upload-time = "2025-08-27T12:14:28.981Z" }, + { url = "https://files.pythonhosted.org/packages/33/5c/85ee16df5b65063ef26017bef33096557a4c83fbe56218ac7cd8c235f16d/rpds_py-0.27.1-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b08d152555acf1f455154d498ca855618c1378ec810646fcd7c76416ac6dc60", size = 423361, upload-time = "2025-08-27T12:14:30.469Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8e/1c2741307fcabd1a334ecf008e92c4f47bb6f848712cf15c923becfe82bb/rpds_py-0.27.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:dce51c828941973a5684d458214d3a36fcd28da3e1875d659388f4f9f12cc33e", size = 562493, upload-time = "2025-08-27T12:14:31.987Z" }, + { url = "https://files.pythonhosted.org/packages/04/03/5159321baae9b2222442a70c1f988cbbd66b9be0675dd3936461269be360/rpds_py-0.27.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:c1476d6f29eb81aa4151c9a31219b03f1f798dc43d8af1250a870735516a1212", size = 592623, upload-time = "2025-08-27T12:14:33.543Z" 
}, + { url = "https://files.pythonhosted.org/packages/ff/39/c09fd1ad28b85bc1d4554a8710233c9f4cefd03d7717a1b8fbfd171d1167/rpds_py-0.27.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3ce0cac322b0d69b63c9cdb895ee1b65805ec9ffad37639f291dd79467bee675", size = 558800, upload-time = "2025-08-27T12:14:35.436Z" }, + { url = "https://files.pythonhosted.org/packages/c5/d6/99228e6bbcf4baa764b18258f519a9035131d91b538d4e0e294313462a98/rpds_py-0.27.1-cp314-cp314-win32.whl", hash = "sha256:dfbfac137d2a3d0725758cd141f878bf4329ba25e34979797c89474a89a8a3a3", size = 221943, upload-time = "2025-08-27T12:14:36.898Z" }, + { url = "https://files.pythonhosted.org/packages/be/07/c802bc6b8e95be83b79bdf23d1aa61d68324cb1006e245d6c58e959e314d/rpds_py-0.27.1-cp314-cp314-win_amd64.whl", hash = "sha256:a6e57b0abfe7cc513450fcf529eb486b6e4d3f8aee83e92eb5f1ef848218d456", size = 233739, upload-time = "2025-08-27T12:14:38.386Z" }, + { url = "https://files.pythonhosted.org/packages/c8/89/3e1b1c16d4c2d547c5717377a8df99aee8099ff050f87c45cb4d5fa70891/rpds_py-0.27.1-cp314-cp314-win_arm64.whl", hash = "sha256:faf8d146f3d476abfee026c4ae3bdd9ca14236ae4e4c310cbd1cf75ba33d24a3", size = 223120, upload-time = "2025-08-27T12:14:39.82Z" }, + { url = "https://files.pythonhosted.org/packages/62/7e/dc7931dc2fa4a6e46b2a4fa744a9fe5c548efd70e0ba74f40b39fa4a8c10/rpds_py-0.27.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:ba81d2b56b6d4911ce735aad0a1d4495e808b8ee4dc58715998741a26874e7c2", size = 358944, upload-time = "2025-08-27T12:14:41.199Z" }, + { url = "https://files.pythonhosted.org/packages/e6/22/4af76ac4e9f336bfb1a5f240d18a33c6b2fcaadb7472ac7680576512b49a/rpds_py-0.27.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:84f7d509870098de0e864cad0102711c1e24e9b1a50ee713b65928adb22269e4", size = 342283, upload-time = "2025-08-27T12:14:42.699Z" }, + { url = "https://files.pythonhosted.org/packages/1c/15/2a7c619b3c2272ea9feb9ade67a45c40b3eeb500d503ad4c28c395dc51b4/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e960fc78fecd1100539f14132425e1d5fe44ecb9239f8f27f079962021523e", size = 380320, upload-time = "2025-08-27T12:14:44.157Z" }, + { url = "https://files.pythonhosted.org/packages/a2/7d/4c6d243ba4a3057e994bb5bedd01b5c963c12fe38dde707a52acdb3849e7/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:62f85b665cedab1a503747617393573995dac4600ff51869d69ad2f39eb5e817", size = 391760, upload-time = "2025-08-27T12:14:45.845Z" }, + { url = "https://files.pythonhosted.org/packages/b4/71/b19401a909b83bcd67f90221330bc1ef11bc486fe4e04c24388d28a618ae/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fed467af29776f6556250c9ed85ea5a4dd121ab56a5f8b206e3e7a4c551e48ec", size = 522476, upload-time = "2025-08-27T12:14:47.364Z" }, + { url = "https://files.pythonhosted.org/packages/e4/44/1a3b9715c0455d2e2f0f6df5ee6d6f5afdc423d0773a8a682ed2b43c566c/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2729615f9d430af0ae6b36cf042cb55c0936408d543fb691e1a9e36648fd35a", size = 403418, upload-time = "2025-08-27T12:14:49.991Z" }, + { url = "https://files.pythonhosted.org/packages/1c/4b/fb6c4f14984eb56673bc868a66536f53417ddb13ed44b391998100a06a96/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b207d881a9aef7ba753d69c123a35d96ca7cb808056998f6b9e8747321f03b8", size = 384771, upload-time = "2025-08-27T12:14:52.159Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/56/d5265d2d28b7420d7b4d4d85cad8ef891760f5135102e60d5c970b976e41/rpds_py-0.27.1-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:639fd5efec029f99b79ae47e5d7e00ad8a773da899b6309f6786ecaf22948c48", size = 400022, upload-time = "2025-08-27T12:14:53.859Z" }, + { url = "https://files.pythonhosted.org/packages/8f/e9/9f5fc70164a569bdd6ed9046486c3568d6926e3a49bdefeeccfb18655875/rpds_py-0.27.1-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fecc80cb2a90e28af8a9b366edacf33d7a91cbfe4c2c4544ea1246e949cfebeb", size = 416787, upload-time = "2025-08-27T12:14:55.673Z" }, + { url = "https://files.pythonhosted.org/packages/d4/64/56dd03430ba491db943a81dcdef115a985aac5f44f565cd39a00c766d45c/rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42a89282d711711d0a62d6f57d81aa43a1368686c45bc1c46b7f079d55692734", size = 557538, upload-time = "2025-08-27T12:14:57.245Z" }, + { url = "https://files.pythonhosted.org/packages/3f/36/92cc885a3129993b1d963a2a42ecf64e6a8e129d2c7cc980dbeba84e55fb/rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:cf9931f14223de59551ab9d38ed18d92f14f055a5f78c1d8ad6493f735021bbb", size = 588512, upload-time = "2025-08-27T12:14:58.728Z" }, + { url = "https://files.pythonhosted.org/packages/dd/10/6b283707780a81919f71625351182b4f98932ac89a09023cb61865136244/rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f39f58a27cc6e59f432b568ed8429c7e1641324fbe38131de852cd77b2d534b0", size = 555813, upload-time = "2025-08-27T12:15:00.334Z" }, + { url = "https://files.pythonhosted.org/packages/04/2e/30b5ea18c01379da6272a92825dd7e53dc9d15c88a19e97932d35d430ef7/rpds_py-0.27.1-cp314-cp314t-win32.whl", hash = "sha256:d5fa0ee122dc09e23607a28e6d7b150da16c662e66409bbe85230e4c85bb528a", size = 217385, upload-time = "2025-08-27T12:15:01.937Z" }, + { url = "https://files.pythonhosted.org/packages/32/7d/97119da51cb1dd3f2f3c0805f155a3aa4a95fa44fe7d78ae15e69edf4f34/rpds_py-0.27.1-cp314-cp314t-win_amd64.whl", hash = "sha256:6567d2bb951e21232c2f660c24cf3470bb96de56cdcb3f071a83feeaff8a2772", size = 230097, upload-time = "2025-08-27T12:15:03.961Z" }, + { url = "https://files.pythonhosted.org/packages/d5/63/b7cc415c345625d5e62f694ea356c58fb964861409008118f1245f8c3347/rpds_py-0.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7ba22cb9693df986033b91ae1d7a979bc399237d45fccf875b76f62bb9e52ddf", size = 371360, upload-time = "2025-08-27T12:15:29.218Z" }, + { url = "https://files.pythonhosted.org/packages/e5/8c/12e1b24b560cf378b8ffbdb9dc73abd529e1adcfcf82727dfd29c4a7b88d/rpds_py-0.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b640501be9288c77738b5492b3fd3abc4ba95c50c2e41273c8a1459f08298d3", size = 353933, upload-time = "2025-08-27T12:15:30.837Z" }, + { url = "https://files.pythonhosted.org/packages/9b/85/1bb2210c1f7a1b99e91fea486b9f0f894aa5da3a5ec7097cbad7dec6d40f/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb08b65b93e0c6dd70aac7f7890a9c0938d5ec71d5cb32d45cf844fb8ae47636", size = 382962, upload-time = "2025-08-27T12:15:32.348Z" }, + { url = "https://files.pythonhosted.org/packages/cc/c9/a839b9f219cf80ed65f27a7f5ddbb2809c1b85c966020ae2dff490e0b18e/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d7ff07d696a7a38152ebdb8212ca9e5baab56656749f3d6004b34ab726b550b8", size = 394412, upload-time = "2025-08-27T12:15:33.839Z" }, + { url = 
"https://files.pythonhosted.org/packages/02/2d/b1d7f928b0b1f4fc2e0133e8051d199b01d7384875adc63b6ddadf3de7e5/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb7c72262deae25366e3b6c0c0ba46007967aea15d1eea746e44ddba8ec58dcc", size = 523972, upload-time = "2025-08-27T12:15:35.377Z" }, + { url = "https://files.pythonhosted.org/packages/a9/af/2cbf56edd2d07716df1aec8a726b3159deb47cb5c27e1e42b71d705a7c2f/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b002cab05d6339716b03a4a3a2ce26737f6231d7b523f339fa061d53368c9d8", size = 403273, upload-time = "2025-08-27T12:15:37.051Z" }, + { url = "https://files.pythonhosted.org/packages/c0/93/425e32200158d44ff01da5d9612c3b6711fe69f606f06e3895511f17473b/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23f6b69d1c26c4704fec01311963a41d7de3ee0570a84ebde4d544e5a1859ffc", size = 385278, upload-time = "2025-08-27T12:15:38.571Z" }, + { url = "https://files.pythonhosted.org/packages/eb/1a/1a04a915ecd0551bfa9e77b7672d1937b4b72a0fc204a17deef76001cfb2/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:530064db9146b247351f2a0250b8f00b289accea4596a033e94be2389977de71", size = 402084, upload-time = "2025-08-27T12:15:40.529Z" }, + { url = "https://files.pythonhosted.org/packages/51/f7/66585c0fe5714368b62951d2513b684e5215beaceab2c6629549ddb15036/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7b90b0496570bd6b0321724a330d8b545827c4df2034b6ddfc5f5275f55da2ad", size = 419041, upload-time = "2025-08-27T12:15:42.191Z" }, + { url = "https://files.pythonhosted.org/packages/8e/7e/83a508f6b8e219bba2d4af077c35ba0e0cdd35a751a3be6a7cba5a55ad71/rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:879b0e14a2da6a1102a3fc8af580fc1ead37e6d6692a781bd8c83da37429b5ab", size = 560084, upload-time = "2025-08-27T12:15:43.839Z" }, + { url = "https://files.pythonhosted.org/packages/66/66/bb945683b958a1b19eb0fe715594630d0f36396ebdef4d9b89c2fa09aa56/rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:0d807710df3b5faa66c731afa162ea29717ab3be17bdc15f90f2d9f183da4059", size = 590115, upload-time = "2025-08-27T12:15:46.647Z" }, + { url = "https://files.pythonhosted.org/packages/12/00/ccfaafaf7db7e7adace915e5c2f2c2410e16402561801e9c7f96683002d3/rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:3adc388fc3afb6540aec081fa59e6e0d3908722771aa1e37ffe22b220a436f0b", size = 556561, upload-time = "2025-08-27T12:15:48.219Z" }, + { url = "https://files.pythonhosted.org/packages/e1/b7/92b6ed9aad103bfe1c45df98453dfae40969eef2cb6c6239c58d7e96f1b3/rpds_py-0.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c796c0c1cc68cb08b0284db4229f5af76168172670c74908fdbd4b7d7f515819", size = 229125, upload-time = "2025-08-27T12:15:49.956Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ed/e1fba02de17f4f76318b834425257c8ea297e415e12c68b4361f63e8ae92/rpds_py-0.27.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cdfe4bb2f9fe7458b7453ad3c33e726d6d1c7c0a72960bcc23800d77384e42df", size = 371402, upload-time = "2025-08-27T12:15:51.561Z" }, + { url = "https://files.pythonhosted.org/packages/af/7c/e16b959b316048b55585a697e94add55a4ae0d984434d279ea83442e460d/rpds_py-0.27.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:8fabb8fd848a5f75a2324e4a84501ee3a5e3c78d8603f83475441866e60b94a3", size = 354084, upload-time = 
"2025-08-27T12:15:53.219Z" }, + { url = "https://files.pythonhosted.org/packages/de/c1/ade645f55de76799fdd08682d51ae6724cb46f318573f18be49b1e040428/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda8719d598f2f7f3e0f885cba8646644b55a187762bec091fa14a2b819746a9", size = 383090, upload-time = "2025-08-27T12:15:55.158Z" }, + { url = "https://files.pythonhosted.org/packages/1f/27/89070ca9b856e52960da1472efcb6c20ba27cfe902f4f23ed095b9cfc61d/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c64d07e95606ec402a0a1c511fe003873fa6af630bda59bac77fac8b4318ebc", size = 394519, upload-time = "2025-08-27T12:15:57.238Z" }, + { url = "https://files.pythonhosted.org/packages/b3/28/be120586874ef906aa5aeeae95ae8df4184bc757e5b6bd1c729ccff45ed5/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93a2ed40de81bcff59aabebb626562d48332f3d028ca2036f1d23cbb52750be4", size = 523817, upload-time = "2025-08-27T12:15:59.237Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/70cc197bc11cfcde02a86f36ac1eed15c56667c2ebddbdb76a47e90306da/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:387ce8c44ae94e0ec50532d9cb0edce17311024c9794eb196b90e1058aadeb66", size = 403240, upload-time = "2025-08-27T12:16:00.923Z" }, + { url = "https://files.pythonhosted.org/packages/cf/35/46936cca449f7f518f2f4996e0e8344db4b57e2081e752441154089d2a5f/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaf94f812c95b5e60ebaf8bfb1898a7d7cb9c1af5744d4a67fa47796e0465d4e", size = 385194, upload-time = "2025-08-27T12:16:02.802Z" }, + { url = "https://files.pythonhosted.org/packages/e1/62/29c0d3e5125c3270b51415af7cbff1ec587379c84f55a5761cc9efa8cd06/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:4848ca84d6ded9b58e474dfdbad4b8bfb450344c0551ddc8d958bf4b36aa837c", size = 402086, upload-time = "2025-08-27T12:16:04.806Z" }, + { url = "https://files.pythonhosted.org/packages/8f/66/03e1087679227785474466fdd04157fb793b3b76e3fcf01cbf4c693c1949/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2bde09cbcf2248b73c7c323be49b280180ff39fadcfe04e7b6f54a678d02a7cf", size = 419272, upload-time = "2025-08-27T12:16:06.471Z" }, + { url = "https://files.pythonhosted.org/packages/6a/24/e3e72d265121e00b063aef3e3501e5b2473cf1b23511d56e529531acf01e/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:94c44ee01fd21c9058f124d2d4f0c9dc7634bec93cd4b38eefc385dabe71acbf", size = 560003, upload-time = "2025-08-27T12:16:08.06Z" }, + { url = "https://files.pythonhosted.org/packages/26/ca/f5a344c534214cc2d41118c0699fffbdc2c1bc7046f2a2b9609765ab9c92/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:df8b74962e35c9249425d90144e721eed198e6555a0e22a563d29fe4486b51f6", size = 590482, upload-time = "2025-08-27T12:16:10.137Z" }, + { url = "https://files.pythonhosted.org/packages/ce/08/4349bdd5c64d9d193c360aa9db89adeee6f6682ab8825dca0a3f535f434f/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:dc23e6820e3b40847e2f4a7726462ba0cf53089512abe9ee16318c366494c17a", size = 556523, upload-time = "2025-08-27T12:16:12.188Z" }, +] + +[[package]] +name = "ruff" +version = "0.13.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/02/df/8d7d8c515d33adfc540e2edf6c6021ea1c5a58a678d8cfce9fae59aabcab/ruff-0.13.2.tar.gz", hash = "sha256:cb12fffd32fb16d32cef4ed16d8c7cdc27ed7c944eaa98d99d01ab7ab0b710ff", size = 5416417, upload-time = "2025-09-25T14:54:09.936Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6e/84/5716a7fa4758e41bf70e603e13637c42cfb9dbf7ceb07180211b9bbf75ef/ruff-0.13.2-py3-none-linux_armv6l.whl", hash = "sha256:3796345842b55f033a78285e4f1641078f902020d8450cade03aad01bffd81c3", size = 12343254, upload-time = "2025-09-25T14:53:27.784Z" }, + { url = "https://files.pythonhosted.org/packages/9b/77/c7042582401bb9ac8eff25360e9335e901d7a1c0749a2b28ba4ecb239991/ruff-0.13.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ff7e4dda12e683e9709ac89e2dd436abf31a4d8a8fc3d89656231ed808e231d2", size = 13040891, upload-time = "2025-09-25T14:53:31.38Z" }, + { url = "https://files.pythonhosted.org/packages/c6/15/125a7f76eb295cb34d19c6778e3a82ace33730ad4e6f28d3427e134a02e0/ruff-0.13.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c75e9d2a2fafd1fdd895d0e7e24b44355984affdde1c412a6f6d3f6e16b22d46", size = 12243588, upload-time = "2025-09-25T14:53:33.543Z" }, + { url = "https://files.pythonhosted.org/packages/9e/eb/0093ae04a70f81f8be7fd7ed6456e926b65d238fc122311293d033fdf91e/ruff-0.13.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cceac74e7bbc53ed7d15d1042ffe7b6577bf294611ad90393bf9b2a0f0ec7cb6", size = 12491359, upload-time = "2025-09-25T14:53:35.892Z" }, + { url = "https://files.pythonhosted.org/packages/43/fe/72b525948a6956f07dad4a6f122336b6a05f2e3fd27471cea612349fedb9/ruff-0.13.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6ae3f469b5465ba6d9721383ae9d49310c19b452a161b57507764d7ef15f4b07", size = 12162486, upload-time = "2025-09-25T14:53:38.171Z" }, + { url = "https://files.pythonhosted.org/packages/6a/e3/0fac422bbbfb2ea838023e0d9fcf1f30183d83ab2482800e2cb892d02dfe/ruff-0.13.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f8f9e3cd6714358238cd6626b9d43026ed19c0c018376ac1ef3c3a04ffb42d8", size = 13871203, upload-time = "2025-09-25T14:53:41.943Z" }, + { url = "https://files.pythonhosted.org/packages/6b/82/b721c8e3ec5df6d83ba0e45dcf00892c4f98b325256c42c38ef136496cbf/ruff-0.13.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c6ed79584a8f6cbe2e5d7dbacf7cc1ee29cbdb5df1172e77fbdadc8bb85a1f89", size = 14929635, upload-time = "2025-09-25T14:53:43.953Z" }, + { url = "https://files.pythonhosted.org/packages/c4/a0/ad56faf6daa507b83079a1ad7a11694b87d61e6bf01c66bd82b466f21821/ruff-0.13.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aed130b2fde049cea2019f55deb939103123cdd191105f97a0599a3e753d61b0", size = 14338783, upload-time = "2025-09-25T14:53:46.205Z" }, + { url = "https://files.pythonhosted.org/packages/47/77/ad1d9156db8f99cd01ee7e29d74b34050e8075a8438e589121fcd25c4b08/ruff-0.13.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1887c230c2c9d65ed1b4e4cfe4d255577ea28b718ae226c348ae68df958191aa", size = 13355322, upload-time = "2025-09-25T14:53:48.164Z" }, + { url = "https://files.pythonhosted.org/packages/64/8b/e87cfca2be6f8b9f41f0bb12dc48c6455e2d66df46fe61bb441a226f1089/ruff-0.13.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5bcb10276b69b3cfea3a102ca119ffe5c6ba3901e20e60cf9efb53fa417633c3", size = 13354427, upload-time = "2025-09-25T14:53:50.486Z" }, + { url = 
"https://files.pythonhosted.org/packages/7f/df/bf382f3fbead082a575edb860897287f42b1b3c694bafa16bc9904c11ed3/ruff-0.13.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:afa721017aa55a555b2ff7944816587f1cb813c2c0a882d158f59b832da1660d", size = 13537637, upload-time = "2025-09-25T14:53:52.887Z" }, + { url = "https://files.pythonhosted.org/packages/51/70/1fb7a7c8a6fc8bd15636288a46e209e81913b87988f26e1913d0851e54f4/ruff-0.13.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1dbc875cf3720c64b3990fef8939334e74cb0ca65b8dbc61d1f439201a38101b", size = 12340025, upload-time = "2025-09-25T14:53:54.88Z" }, + { url = "https://files.pythonhosted.org/packages/4c/27/1e5b3f1c23ca5dd4106d9d580e5c13d9acb70288bff614b3d7b638378cc9/ruff-0.13.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5b939a1b2a960e9742e9a347e5bbc9b3c3d2c716f86c6ae273d9cbd64f193f22", size = 12133449, upload-time = "2025-09-25T14:53:57.089Z" }, + { url = "https://files.pythonhosted.org/packages/2d/09/b92a5ccee289f11ab128df57d5911224197d8d55ef3bd2043534ff72ca54/ruff-0.13.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:50e2d52acb8de3804fc5f6e2fa3ae9bdc6812410a9e46837e673ad1f90a18736", size = 13051369, upload-time = "2025-09-25T14:53:59.124Z" }, + { url = "https://files.pythonhosted.org/packages/89/99/26c9d1c7d8150f45e346dc045cc49f23e961efceb4a70c47dea0960dea9a/ruff-0.13.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3196bc13ab2110c176b9a4ae5ff7ab676faaa1964b330a1383ba20e1e19645f2", size = 13523644, upload-time = "2025-09-25T14:54:01.622Z" }, + { url = "https://files.pythonhosted.org/packages/f7/00/e7f1501e81e8ec290e79527827af1d88f541d8d26151751b46108978dade/ruff-0.13.2-py3-none-win32.whl", hash = "sha256:7c2a0b7c1e87795fec3404a485096bcd790216c7c146a922d121d8b9c8f1aaac", size = 12245990, upload-time = "2025-09-25T14:54:03.647Z" }, + { url = "https://files.pythonhosted.org/packages/ee/bd/d9f33a73de84fafd0146c6fba4f497c4565fe8fa8b46874b8e438869abc2/ruff-0.13.2-py3-none-win_amd64.whl", hash = "sha256:17d95fb32218357c89355f6f6f9a804133e404fc1f65694372e02a557edf8585", size = 13324004, upload-time = "2025-09-25T14:54:06.05Z" }, + { url = "https://files.pythonhosted.org/packages/c3/12/28fa2f597a605884deb0f65c1b1ae05111051b2a7030f5d8a4ff7f4599ba/ruff-0.13.2-py3-none-win_arm64.whl", hash = "sha256:da711b14c530412c827219312b7d7fbb4877fb31150083add7e8c5336549cea7", size = 12484437, upload-time = "2025-09-25T14:54:08.022Z" }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + +[[package]] +name = "sse-starlette" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/6f/22ed6e33f8a9e76ca0a412405f31abb844b779d52c5f96660766edcd737c/sse_starlette-3.0.2.tar.gz", hash = "sha256:ccd60b5765ebb3584d0de2d7a6e4f745672581de4f5005ab31c3a25d10b52b3a", size = 20985, upload-time = 
"2025-07-27T09:07:44.565Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/10/c78f463b4ef22eef8491f218f692be838282cd65480f6e423d7730dfd1fb/sse_starlette-3.0.2-py3-none-any.whl", hash = "sha256:16b7cbfddbcd4eaca11f7b586f3b8a080f1afe952c15813455b162edea619e5a", size = 11297, upload-time = "2025-07-27T09:07:43.268Z" }, +] + +[[package]] +name = "starlette" +version = "0.48.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a7/a5/d6f429d43394057b67a6b5bbe6eae2f77a6bf7459d961fdb224bf206eee6/starlette-0.48.0.tar.gz", hash = "sha256:7e8cee469a8ab2352911528110ce9088fdc6a37d9876926e73da7ce4aa4c7a46", size = 2652949, upload-time = "2025-09-13T08:41:05.699Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/72/2db2f49247d0a18b4f1bb9a5a39a0162869acf235f3a96418363947b3d46/starlette-0.48.0-py3-none-any.whl", hash = "sha256:0764ca97b097582558ecb498132ed0c7d942f233f365b86ba37770e026510659", size = 73736, upload-time = "2025-09-13T08:41:03.869Z" }, +] + +[[package]] +name = "tomli" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, + { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, + { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, + { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, + { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, + { url = 
"https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, + { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, + { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, + { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, + { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, + { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, + { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, + { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, + { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, + { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, + { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, + { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" }, + { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" }, + { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" }, + { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" }, + { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" }, + { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" }, + { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, +] + +[[package]] +name = "uvicorn" +version = "0.37.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/57/1616c8274c3442d802621abf5deb230771c7a0fec9414cb6763900eb3868/uvicorn-0.37.0.tar.gz", hash = "sha256:4115c8add6d3fd536c8ee77f0e14a7fd2ebba939fed9b02583a97f80648f9e13", size = 80367, upload-time = "2025-09-23T13:33:47.486Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/cd/584a2ceb5532af99dd09e50919e3615ba99aa127e9850eafe5f31ddfdb9a/uvicorn-0.37.0-py3-none-any.whl", hash = "sha256:913b2b88672343739927ce381ff9e2ad62541f9f8289664fa1d1d3803fa2ce6c", size = 67976, upload-time = "2025-09-23T13:33:45.842Z" }, +] diff --git a/examples/servers/simple-tool-async/.python-version b/examples/servers/simple-tool-async/.python-version new file mode 100644 index 000000000..c8cfe3959 --- /dev/null +++ b/examples/servers/simple-tool-async/.python-version @@ -0,0 +1 @@ +3.10 diff --git a/examples/servers/simple-tool-async/README.md 
b/examples/servers/simple-tool-async/README.md new file mode 100644 index 000000000..20878261f --- /dev/null +++ b/examples/servers/simple-tool-async/README.md @@ -0,0 +1,56 @@ +# Simple Tool Async Example + +A simple MCP server that demonstrates async tool execution with operation tokens and long-running operations. + +## Usage + +Start the server using either stdio (default) or streamable-http transport: + +```bash +# Using stdio transport (default) +uv run mcp-simple-tool-async + +# Using streamable-http transport on custom port +uv run mcp-simple-tool-async --transport streamable-http --port 8000 +``` + +The server exposes an async tool named "fetch_website" that accepts one required argument: + +- `url`: The URL of the website to fetch + +The tool runs asynchronously with a 5-second delay to simulate a long-running operation, making it useful for testing async tool capabilities. + +## Example + +Using the MCP client with protocol version "next", you can use the async tool like this: + +```python +import asyncio +from mcp import ClientSession, types +from mcp.client.streamable_http import streamablehttp_client + + +async def main(): + async with streamablehttp_client("http://127.0.0.1:8000/mcp") as (read, write, _): + async with ClientSession(read, write, protocol_version="next") as session: + await session.initialize() + + # Call the async tool + result = await session.call_tool("fetch_website", {"url": "https://example.com"}) + + # Get operation token + token = result.operation.token + print(f"Operation started with token: {token}") + + # Poll for completion + while True: + status = await session.get_operation_status(token) + if status.status == "completed": + final_result = await session.get_operation_result(token) + print(f"Result: {final_result.result.content[0].text}") + break + await asyncio.sleep(0.5) + + +asyncio.run(main()) +``` diff --git a/examples/servers/simple-tool-async/mcp_simple_tool_async/__init__.py b/examples/servers/simple-tool-async/mcp_simple_tool_async/__init__.py new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/examples/servers/simple-tool-async/mcp_simple_tool_async/__init__.py @@ -0,0 +1 @@ + diff --git a/examples/servers/simple-tool-async/mcp_simple_tool_async/__main__.py b/examples/servers/simple-tool-async/mcp_simple_tool_async/__main__.py new file mode 100644 index 000000000..e7ef16530 --- /dev/null +++ b/examples/servers/simple-tool-async/mcp_simple_tool_async/__main__.py @@ -0,0 +1,5 @@ +import sys + +from .server import main + +sys.exit(main()) # type: ignore[call-arg] diff --git a/examples/servers/simple-tool-async/mcp_simple_tool_async/server.py b/examples/servers/simple-tool-async/mcp_simple_tool_async/server.py new file mode 100644 index 000000000..3ac9b2c67 --- /dev/null +++ b/examples/servers/simple-tool-async/mcp_simple_tool_async/server.py @@ -0,0 +1,40 @@ +import anyio +import click +import mcp.types as types +import uvicorn +from mcp.server.fastmcp import FastMCP +from mcp.shared._httpx_utils import create_mcp_http_client + +mcp = FastMCP("mcp-website-fetcher") + + +@mcp.tool(invocation_modes=["async"]) +async def fetch_website( + url: str, +) -> list[types.ContentBlock]: + headers = {"User-Agent": "MCP Test Server (github.com/modelcontextprotocol/python-sdk)"} + async with create_mcp_http_client(headers=headers) as client: + await anyio.sleep(5) + response = await client.get(url) + response.raise_for_status() + return [types.TextContent(type="text", text=response.text)] + + +@click.command() +@click.option("--port", 
default=8000, help="Port to listen on for HTTP") +@click.option( + "--transport", + type=click.Choice(["stdio", "streamable-http"]), + default="stdio", + help="Transport type", +) +def main(port: int, transport: str): + if transport == "stdio": + mcp.run(transport="stdio") + elif transport == "streamable-http": + app = mcp.streamable_http_app() + server = uvicorn.Server(config=uvicorn.Config(app=app, host="127.0.0.1", port=port, log_level="error")) + print(f"Starting {transport} server on port {port}") + server.run() + else: + raise ValueError(f"Invalid transport for test server: {transport}") diff --git a/examples/servers/simple-tool-async/pyproject.toml b/examples/servers/simple-tool-async/pyproject.toml new file mode 100644 index 000000000..46c00170d --- /dev/null +++ b/examples/servers/simple-tool-async/pyproject.toml @@ -0,0 +1,47 @@ +[project] +name = "mcp-simple-tool-async" +version = "0.1.0" +description = "A simple MCP server exposing an async website fetching tool" +readme = "README.md" +requires-python = ">=3.10" +authors = [{ name = "Anthropic, PBC." }] +maintainers = [ + { name = "David Soria Parra", email = "davidsp@anthropic.com" }, + { name = "Justin Spahr-Summers", email = "justin@anthropic.com" }, +] +keywords = ["mcp", "llm", "automation", "web", "fetch"] +license = { text = "MIT" } +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", +] +dependencies = ["anyio>=4.5", "click>=8.2.0", "httpx>=0.27", "mcp"] + +[project.scripts] +mcp-simple-tool-async = "mcp_simple_tool_async.server:main" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["mcp_simple_tool_async"] + +[tool.pyright] +include = ["mcp_simple_tool_async"] +venvPath = "." +venv = ".venv" + +[tool.ruff.lint] +select = ["E", "F", "I"] +ignore = [] + +[tool.ruff] +line-length = 120 +target-version = "py310" + +[tool.uv] +dev-dependencies = ["pyright>=1.1.378", "pytest>=8.3.3", "ruff>=0.6.9"] diff --git a/examples/servers/sqlite-async-operations/.gitignore b/examples/servers/sqlite-async-operations/.gitignore new file mode 100644 index 000000000..3997beadf --- /dev/null +++ b/examples/servers/sqlite-async-operations/.gitignore @@ -0,0 +1 @@ +*.db \ No newline at end of file diff --git a/examples/servers/sqlite-async-operations/README.md b/examples/servers/sqlite-async-operations/README.md new file mode 100644 index 000000000..efbcd7775 --- /dev/null +++ b/examples/servers/sqlite-async-operations/README.md @@ -0,0 +1,36 @@ +# SQLite Async Operations Example + +This example demonstrates how to implement custom async operations storage and task queuing using SQLite with the MCP Python SDK. 
+
+## Architecture
+
+The example showcases the pluggable architecture of the async operations system (the wiring sketch below shows how the pieces fit together):
+
+- `SQLiteAsyncOperationStore`: Custom implementation that persists operations to SQLite
+- `SQLiteAsyncOperationBroker`: Custom implementation that persists pending tasks to SQLite
+- `ServerAsyncOperationManager`: Uses both the custom store and broker for full persistence
+- `FastMCP`: Configured with the custom async operations manager
+
+## Usage
+
+Install and run the server:
+
+```bash
+# Run with the default SQLite database (stdio transport)
+uv run mcp-sqlite-async-operations
+
+# Run with a custom database path
+uv run mcp-sqlite-async-operations --db-path /path/to/custom.db
+
+# Using streamable-http transport on custom port
+uv run mcp-sqlite-async-operations --transport streamable-http --port 8000
+```
+
+## Testing Persistent Async Operations
+
+1. Start the server
+2. Call the async `fetch_website` tool
+3. **Restart the server while the operation is running**
+4. The operation will automatically resume and complete
+5. Use the operation token to check status and retrieve results (see the reconnect sketch below)
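+
+## Wiring Sketch
+
+A minimal sketch of how the pieces fit together; this mirrors what `main()` in `mcp_sqlite_async_operations/server.py` does, and the database path is only an example:
+
+```python
+from mcp.server.fastmcp import FastMCP
+from mcp.shared.async_operations import ServerAsyncOperationManager
+
+from mcp_sqlite_async_operations.server import (
+    SQLiteAsyncOperationBroker,
+    SQLiteAsyncOperationStore,
+)
+
+# Store and broker share one SQLite file, so resolved operations and
+# still-queued tasks survive a restart together.
+store = SQLiteAsyncOperationStore("async_operations.db")
+broker = SQLiteAsyncOperationBroker("async_operations.db")
+manager = ServerAsyncOperationManager(store=store, broker=broker)
+
+mcp = FastMCP("SQLite Async Operations Demo", async_operations=manager)
+```
+
+## Checking a Token After a Restart
+
+For step 5 above, a client that saved the operation token can reconnect and poll it. A minimal sketch (assuming the server was started with `--transport streamable-http` on the default port; the token value is a placeholder for one returned by an earlier `call_tool`):
+
+```python
+import asyncio
+
+from mcp import ClientSession
+from mcp.client.streamable_http import streamablehttp_client
+
+TOKEN = "..."  # placeholder: token saved from the original call_tool response
+
+
+async def check(token: str) -> None:
+    async with streamablehttp_client("http://127.0.0.1:8000/mcp") as (read, write, _):
+        async with ClientSession(read, write, protocol_version="next") as session:
+            await session.initialize()
+            # The token remains valid across server restarts because the
+            # operation state lives in SQLite, not in process memory.
+            status = await session.get_operation_status(token)
+            print(f"Status after restart: {status.status}")
+            if status.status == "completed":
+                final = await session.get_operation_result(token)
+                print(final.result.content[0].text)
+
+
+asyncio.run(check(TOKEN))
+```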
diff --git a/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/__init__.py b/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/__init__.py
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/__init__.py
@@ -0,0 +1 @@
+
diff --git a/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/__main__.py b/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/__main__.py
new file mode 100644
index 000000000..f5f6e402d
--- /dev/null
+++ b/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/__main__.py
@@ -0,0 +1,4 @@
+from .server import main
+
+if __name__ == "__main__":
+    main()
diff --git a/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/server.py b/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/server.py
new file mode 100644
index 000000000..033b4abcb
--- /dev/null
+++ b/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/server.py
@@ -0,0 +1,371 @@
+"""SQLite-based async operations example server."""
+
+from __future__ import annotations
+
+import json
+import sqlite3
+import time
+from collections import deque
+from typing import Any
+
+import anyio
+import click
+import uvicorn
+from mcp import types
+from mcp.server.fastmcp import FastMCP
+from mcp.server.session import ServerSession
+from mcp.shared._httpx_utils import create_mcp_http_client
+from mcp.shared.async_operations import (
+    AsyncOperationBroker,
+    AsyncOperationStore,
+    PendingAsyncTask,
+    ServerAsyncOperation,
+    ServerAsyncOperationManager,
+)
+from mcp.shared.context import RequestContext, SerializableRequestContext
+from mcp.types import AsyncOperationStatus, CallToolResult
+
+
+class SQLiteAsyncOperationStore(AsyncOperationStore):
+    """SQLite-based implementation of AsyncOperationStore."""
+
+    def __init__(self, db_path: str = "async_operations.db"):
+        self.db_path = db_path
+        self._init_db()
+
+    def _init_db(self):
+        """Initialize the SQLite database."""
+        with sqlite3.connect(self.db_path) as conn:
+            conn.execute("""
+                CREATE TABLE IF NOT EXISTS operations (
+                    token TEXT PRIMARY KEY,
+                    tool_name TEXT NOT NULL,
+                    arguments TEXT NOT NULL,
+                    status TEXT NOT NULL,
+                    created_at REAL NOT NULL,
+                    keep_alive INTEGER NOT NULL,
+                    resolved_at REAL,
+                    session_id TEXT,
+                    result TEXT,
+                    error TEXT
+                )
+            """)
+            conn.commit()
+
+    async def get_operation(self, token: str) -> ServerAsyncOperation | None:
+        """Get operation by token."""
+        with sqlite3.connect(self.db_path) as conn:
+            conn.row_factory = sqlite3.Row
+            cursor = conn.execute("SELECT * FROM operations WHERE token = ?", (token,))
+            row = cursor.fetchone()
+            if not row:
+                return None
+
+            # Reconstruct CallToolResult from stored JSON
+            result = None
+            if row["result"]:
+                result_data = json.loads(row["result"])
+                result = CallToolResult(
+                    content=result_data.get("content", []),
+                    structuredContent=result_data.get("structuredContent"),
+                    isError=result_data.get("isError", False),
+                )
+
+            return ServerAsyncOperation(
+                token=row["token"],
+                tool_name=row["tool_name"],
+                arguments=json.loads(row["arguments"]),
+                status=row["status"],
+                created_at=row["created_at"],
+                keep_alive=row["keep_alive"],
+                resolved_at=row["resolved_at"],
+                session_id=row["session_id"],
+                result=result,
+                error=row["error"],
+            )
+
+    async def store_operation(self, operation: ServerAsyncOperation) -> None:
+        """Store an operation."""
+        # Serialize the result using Pydantic model_dump()
+        result_json = None
+        if operation.result:
+            try:
+                result_dict = operation.result.model_dump()
+                result_json = json.dumps(result_dict)
+            except (TypeError, ValueError):
+                # Skip the result if it is not serializable
+                result_json = None
+
+        with sqlite3.connect(self.db_path) as conn:
+            conn.execute(
+                """
+                INSERT OR REPLACE INTO operations
+                (token, tool_name, arguments, status, created_at, keep_alive,
+                 resolved_at, session_id, result, error)
+                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+                """,
+                (
+                    operation.token,
+                    operation.tool_name,
+                    json.dumps(operation.arguments),
+                    operation.status,
+                    operation.created_at,
+                    operation.keep_alive,
+                    operation.resolved_at,
+                    operation.session_id,
+                    result_json,
+                    operation.error,
+                ),
+            )
+            conn.commit()
+
+    async def update_status(self, token: str, status: AsyncOperationStatus) -> bool:
+        """Update operation status."""
+        operation = await self.get_operation(token)
+        if not operation:
+            return False
+
+        # Don't allow transitions from terminal states
+        if operation.is_terminal:
+            return False
+
+        resolved_at = time.time() if status in ("completed", "failed", "canceled") else None
+
+        with sqlite3.connect(self.db_path) as conn:
+            cursor = conn.execute(
+                """
+                UPDATE operations
+                SET status = ?, resolved_at = ?
+                WHERE token = ?
+                """,
+                (status, resolved_at, token),
+            )
+            conn.commit()
+
+        return cursor.rowcount > 0
+
+    async def complete_operation_with_result(self, token: str, result: CallToolResult) -> bool:
+        """Complete operation with result."""
+        operation = await self.get_operation(token)
+        if not operation or operation.is_terminal:
+            return False
+
+        # Serialize the result using Pydantic model_dump()
+        result_json = None
+        try:
+            result_dict = result.model_dump()
+            result_json = json.dumps(result_dict)
+        except (TypeError, ValueError):
+            # Skip the result if it is not serializable
+            result_json = None
+
+        resolved_at = time.time()
+        with sqlite3.connect(self.db_path) as conn:
+            cursor = conn.execute(
+                """
+                UPDATE operations
+                SET status = 'completed', result = ?, resolved_at = ?
+                WHERE token = ?
+                """,
+                (result_json, resolved_at, token),
+            )
+            conn.commit()
+        return cursor.rowcount > 0
+
+    async def fail_operation_with_error(self, token: str, error: str) -> bool:
+        """Fail operation with error."""
+        operation = await self.get_operation(token)
+        if not operation or operation.is_terminal:
+            return False
+
+        resolved_at = time.time()
+        with sqlite3.connect(self.db_path) as conn:
+            cursor = conn.execute(
+                """
+                UPDATE operations
+                SET status = 'failed', error = ?, resolved_at = ?
+                WHERE token = ?
+                """,
+                (error, resolved_at, token),
+            )
+            conn.commit()
+        return cursor.rowcount > 0
+
+    async def cleanup_expired(self) -> int:
+        """Remove expired operations and return the count removed."""
+        current_time = time.time()
+        with sqlite3.connect(self.db_path) as conn:
+            cursor = conn.execute(
+                """
+                DELETE FROM operations
+                WHERE resolved_at IS NOT NULL
+                AND (resolved_at + keep_alive) < ?
+                """,
+                (current_time,),
+            )
+            conn.commit()
+            return cursor.rowcount
+
+
+class SQLiteAsyncOperationBroker(AsyncOperationBroker):
+    """SQLite-based implementation of AsyncOperationBroker for persistent task queuing."""
+
+    def __init__(self, db_path: str = "async_operations.db"):
+        self.db_path = db_path
+        self._task_queue: deque[PendingAsyncTask] = deque()
+        self._init_db()
+        # Load persisted tasks on startup
+        self._load_persisted_tasks_sync()
+
+    def _load_persisted_tasks_sync(self):
+        """Load persisted tasks from SQLite on startup (sync version for __init__)."""
+        with sqlite3.connect(self.db_path) as conn:
+            conn.row_factory = sqlite3.Row
+            cursor = conn.execute("""
+                SELECT token, tool_name, arguments, request_id, operation_token, meta, supports_async
+                FROM pending_tasks ORDER BY created_at
+            """)
+            for row in cursor.fetchall():
+                # Check if the operation is already terminal - don't queue it if so
+                with sqlite3.connect(self.db_path) as op_conn:
+                    op_conn.row_factory = sqlite3.Row
+                    op_cursor = op_conn.execute("SELECT status FROM operations WHERE token = ?", (row["token"],))
+                    op_row = op_cursor.fetchone()
+                    if op_row and op_row["status"] in ("completed", "failed", "canceled"):
+                        continue
+
+                # Reconstruct the serializable parts of RequestContext
+                serializable_context = None
+                if row["request_id"]:
+                    serializable_context = SerializableRequestContext(
+                        request_id=row["request_id"],
+                        operation_token=row["operation_token"],
+                        meta=json.loads(row["meta"]) if row["meta"] else None,
+                        supports_async=bool(row["supports_async"]),
+                    )
+
+                task = PendingAsyncTask(
+                    token=row["token"],
+                    tool_name=row["tool_name"],
+                    arguments=json.loads(row["arguments"]),
+                    request_context=serializable_context,
+                )
+                self._task_queue.append(task)
+
+    def _init_db(self):
+        """Initialize the SQLite database for pending tasks."""
+        with sqlite3.connect(self.db_path) as conn:
+            conn.execute("""
+                CREATE TABLE IF NOT EXISTS pending_tasks (
+                    token TEXT PRIMARY KEY,
+                    tool_name TEXT NOT NULL,
+                    arguments TEXT NOT NULL,
+                    request_id TEXT,
+                    operation_token TEXT,
+                    meta TEXT,
+                    request_data TEXT,
+                    supports_async INTEGER DEFAULT 0,
+                    created_at REAL NOT NULL
+                )
+            """)
+            conn.commit()
+
+    async def enqueue_task(
+        self,
+        token: str,
+        tool_name: str,
+        arguments: dict[str, Any],
+        request_context: RequestContext[ServerSession, Any, Any],
+    ) -> None:
+        """Enqueue a task for execution and persist it to SQLite."""
+        # Store in the in-memory queue for immediate processing
+        task = PendingAsyncTask(token=token, tool_name=tool_name, arguments=arguments, request_context=request_context)
+        self._task_queue.append(task)
+
+        # Extract the serializable parts for persistence
+        serializable = request_context.to_serializable()
+        request_id = serializable.request_id
+        operation_token = serializable.operation_token
+        supports_async = serializable.supports_async
+        meta = json.dumps(serializable.meta.model_dump()) if serializable.meta else None
+
+        # Persist to SQLite for restart recovery
+        with sqlite3.connect(self.db_path) as conn:
+            conn.execute(
+                """
+                INSERT OR REPLACE INTO pending_tasks
+                (token, tool_name, arguments, request_id, operation_token, meta,
+                 supports_async, created_at)
+                VALUES (?, ?, ?, ?, ?, ?, ?, ?)
+                """,
+                (
+                    token,
+                    tool_name,
+                    json.dumps(arguments),
+                    request_id,
+                    operation_token,
+                    meta,
+                    int(supports_async),
+                    time.time(),
+                ),
+            )
+            conn.commit()
+
+    async def get_pending_tasks(self) -> list[PendingAsyncTask]:
+        """Get all pending tasks without clearing them."""
+        return list(self._task_queue)
+
+    async def acknowledge_task(self, token: str) -> None:
+        """Acknowledge that a task has been dispatched (but keep it in SQLite until completion)."""
+        # Remove from the memory queue only - keep it in SQLite until the operation completes
+        self._task_queue = deque(task for task in self._task_queue if task.token != token)
+
+    async def complete_task(self, token: str) -> None:
+        """Remove a completed task from persistent storage."""
+        with sqlite3.connect(self.db_path) as conn:
+            conn.execute("DELETE FROM pending_tasks WHERE token = ?", (token,))
+            conn.commit()
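+
+
+# Taken together, the broker methods form a simple at-least-once dispatch loop:
+# get_pending_tasks() snapshots the queue, acknowledge_task() drops a task from
+# memory once dispatched, and complete_task() durably forgets it only after the
+# tool call has finished. A sketch of a driver (hypothetical; the real dispatch
+# lives inside ServerAsyncOperationManager):
+#
+#     async def drain(broker: SQLiteAsyncOperationBroker) -> None:
+#         for task in await broker.get_pending_tasks():
+#             await broker.acknowledge_task(task.token)  # no longer in memory
+#             ...  # execute task.tool_name with task.arguments
+#             await broker.complete_task(task.token)  # safe to forget durably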
+
+
+@click.command()
+@click.option("--port", default=8000, help="Port to listen on for HTTP")
+@click.option(
+    "--transport",
+    type=click.Choice(["stdio", "streamable-http"]),
+    default="stdio",
+    help="Transport type",
+)
+@click.option("--db-path", default="async_operations.db", help="SQLite database path")
+def main(port: int, transport: str, db_path: str):
+    """Run the SQLite async operations example server."""
+    # Create components with the specified database path
+    broker = SQLiteAsyncOperationBroker(db_path)
+    store = SQLiteAsyncOperationStore(db_path)
+    manager = ServerAsyncOperationManager(store=store, broker=broker)
+    mcp = FastMCP("SQLite Async Operations Demo", async_operations=manager)
+
+    @mcp.tool(invocation_modes=["async"])
+    async def fetch_website(
+        url: str,
+    ) -> list[types.ContentBlock]:
+        headers = {"User-Agent": "MCP Test Server (github.com/modelcontextprotocol/python-sdk)"}
+        async with create_mcp_http_client(headers=headers) as client:
+            # Simulate a long-running operation before fetching
+            await anyio.sleep(10)
+            response = await client.get(url)
+            response.raise_for_status()
+            return [types.TextContent(type="text", text=response.text)]
+
+    print(f"Starting server with SQLite database: {db_path}")
+    print("Pending tasks will be automatically restarted on server restart!")
+
+    if transport == "stdio":
+        mcp.run(transport="stdio")
+    elif transport == "streamable-http":
+        app = mcp.streamable_http_app()
+        server = uvicorn.Server(config=uvicorn.Config(app=app, host="127.0.0.1", port=port, log_level="error"))
+        print(f"Starting {transport} server on port {port}")
+        server.run()
+    else:
+        raise ValueError(f"Invalid transport for test server: {transport}")
diff --git a/examples/servers/sqlite-async-operations/pyproject.toml b/examples/servers/sqlite-async-operations/pyproject.toml
new file mode 100644
index 000000000..e5ba37f29
--- /dev/null
+++ b/examples/servers/sqlite-async-operations/pyproject.toml
@@ -0,0 +1,33 @@
+[project]
+name = "mcp-sqlite-async-operations"
+version = "0.1.0"
+description = "Example MCP server demonstrating SQLite-based async operations storage"
+readme = "README.md"
+requires-python = ">=3.10"
+dependencies = ["anyio>=4.5", "click>=8.2.0", "httpx>=0.27", "mcp"]
+
+[project.scripts]
+mcp-sqlite-async-operations = "mcp_sqlite_async_operations.server:main"
+
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.hatch.build.targets.wheel]
+packages = ["mcp_sqlite_async_operations"]
+
+[tool.pyright]
+include = ["mcp_sqlite_async_operations"]
+venvPath = "."
+venv = ".venv"
+
+[tool.ruff.lint]
+select = ["E", "F", "I"]
+ignore = []
+
+[tool.ruff]
+line-length = 120
+target-version = "py310"
+
+[tool.uv]
+dev-dependencies = ["pyright>=1.1.378", "pytest>=8.3.3", "ruff>=0.6.9"]
diff --git a/examples/snippets/clients/async_elicitation_client.py b/examples/snippets/clients/async_elicitation_client.py
new file mode 100644
index 000000000..0f7290784
--- /dev/null
+++ b/examples/snippets/clients/async_elicitation_client.py
@@ -0,0 +1,119 @@
+"""
+Client example for async tools with elicitation.
+
+cd to the `examples/snippets` directory and run:
+    uv run async-elicitation-client
+"""
+
+import os
+
+import anyio
+
+from mcp import ClientSession, StdioServerParameters, types
+from mcp.client.stdio import stdio_client
+from mcp.shared.context import RequestContext
+
+# Server parameters for async elicitation example
+server_params = StdioServerParameters(
+    command="uv",
+    args=["run", "server", "async_tool_elicitation", "stdio"],
+    env={"UV_INDEX": os.environ.get("UV_INDEX", "")},
+)
+
+
+async def elicitation_callback(context: RequestContext[ClientSession, None], params: types.ElicitRequestParams):
+    """Handle elicitation requests from the server."""
+    print(f"Server is asking: {params.message}")
+
+    # Handle different types of elicitation
+    if "data_migration" in params.message:
+        print("Client responding: Continue with high priority")
+        return types.ElicitResult(
+            action="accept",
+            content={"continue_processing": True, "priority_level": "high"},
+        )
+    elif "file operation" in params.message.lower() or "confirm" in params.message.lower():
+        print("Client responding: Confirm operation with backup")
+        return types.ElicitResult(
+            action="accept",
+            content={"confirm_operation": True, "backup_first": True},
+        )
+    elif "How should we proceed" in params.message:
+        print("Client responding: Continue with normal priority")
+        return types.ElicitResult(
+            action="accept",
+            content={"continue_processing": True, "priority_level": "normal"},
+        )
+    else:
+        print("Client responding: Decline")
+        return types.ElicitResult(action="decline")
+
+
+async def test_process_with_confirmation(session: ClientSession):
+    """Test process that requires user confirmation."""
+    print("Testing process with confirmation...")
+
+    result = await session.call_tool("process_with_confirmation", {"operation": "data_migration"})
+
+    if result.operation:
+        token = result.operation.token
+        print(f"Operation started with token: {token}")
+
+        while True:
+            status = await session.get_operation_status(token)
+            if status.status == "completed":
+                final_result = await session.get_operation_result(token)
+                for content in final_result.result.content:
+                    if isinstance(content, types.TextContent):
+                        print(f"Result: {content.text}")
+                break
+            elif status.status == "failed":
+                print(f"Operation failed: {status.error}")
+                break
+
+            await anyio.sleep(0.3)
+
+
+async def test_file_operation(session: ClientSession):
+    """Test file operation with confirmation."""
+    print("\nTesting file operation...")
+
+    result = await session.call_tool(
+        "file_operation", {"file_path": "/path/to/important_file.txt", "operation_type": "delete"}
+    )
+
+    if result.operation:
+        token = result.operation.token
+        print(f"File operation started with token: {token}")
+
+        while True:
+            status = await session.get_operation_status(token)
+            if status.status == "completed":
+                final_result = await session.get_operation_result(token)
+                for content in final_result.result.content:
+                    if isinstance(content, types.TextContent):
+                        print(f"Result: {content.text}")
+                break
+            elif status.status == "failed":
+                print(f"File operation failed: {status.error}")
+                break
+
+            await anyio.sleep(0.3)
+
+
+async def run():
+    """Run the async elicitation client example."""
+    async with stdio_client(server_params) as (read, write):
+        async with ClientSession(
+            read, write, protocol_version="next", elicitation_callback=elicitation_callback
+        ) as session:
+            await session.initialize()
+
+            await test_process_with_confirmation(session)
+            await test_file_operation(session)
+
+            print("\nElicitation examples complete!")
+
+
+def main():
+    anyio.run(run)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/examples/snippets/clients/async_progress_client.py b/examples/snippets/clients/async_progress_client.py
new file mode 100644
index 000000000..337131a92
--- /dev/null
+++ b/examples/snippets/clients/async_progress_client.py
@@ -0,0 +1,111 @@
+"""
+Client example for async tools with progress notifications.
+
+cd to the `examples/snippets` directory and run:
+    uv run async-progress-client
+"""
+
+import os
+
+import anyio
+
+from mcp import ClientSession, StdioServerParameters, types
+from mcp.client.stdio import stdio_client
+
+# Server parameters for async progress example
+server_params = StdioServerParameters(
+    command="uv",
+    args=["run", "server", "async_tool_progress", "stdio"],
+    env={"UV_INDEX": os.environ.get("UV_INDEX", "")},
+)
+
+
+async def test_batch_processing(session: ClientSession):
+    """Test batch processing with progress notifications."""
+    print("Testing batch processing with progress notifications...")
+
+    items = ["apple", "banana", "cherry", "date", "elderberry"]
+    progress_updates: list[tuple[float, float | None, str | None]] = []
+
+    async def progress_callback(progress: float, total: float | None, message: str | None) -> None:
+        progress_pct = int(progress * 100) if progress else 0
+        total_str = f"/{int(total * 100)}%" if total else ""
+        message_str = f" - {message}" if message else ""
+        print(f"Progress: {progress_pct}{total_str}{message_str}")
+        progress_updates.append((progress, total, message))
+
+    result = await session.call_tool("batch_process", arguments={"items": items}, progress_callback=progress_callback)
+
+    if result.operation:
+        token = result.operation.token
+        print(f"Batch operation started with token: {token}")
+
+        # Poll for completion
+        while True:
+            status = await session.get_operation_status(token)
+            if status.status == "completed":
+                final_result = await session.get_operation_result(token)
+
+                # Show structured result
+                if final_result.result.structuredContent:
+                    print(f"Structured result: {final_result.result.structuredContent}")
+
+                # Show text content
+                for content in final_result.result.content:
+                    if isinstance(content, types.TextContent):
+                        print(f"Text result: {content.text}")
+                break
+            elif status.status == "failed":
+                print(f"Operation failed: {status.error}")
+                break
+
+            await anyio.sleep(0.3)
+
+    print(f"Received {len(progress_updates)} progress updates")
+
+
+async def test_data_pipeline(session: ClientSession):
+    """Test data pipeline with progress tracking."""
+    print("\nTesting data pipeline...")
+
+    operations = ["validate", "clean", "transform", "analyze", "export"]
+
+    result = await session.call_tool(
+        "data_pipeline", arguments={"dataset": "customer_data.csv", "operations": operations}
+    )
+
+    if result.operation:
+        token = result.operation.token
+        print(f"Pipeline started with token: {token}")
+
+        while True:
+            status = await session.get_operation_status(token)
+            if status.status == "completed":
+                final_result = await session.get_operation_result(token)
+
+                if final_result.result.structuredContent:
+                    print("Pipeline results:")
+                    for op, result_text in final_result.result.structuredContent.items():
+                        print(f"  {op}: {result_text}")
+                break
+            elif status.status == "failed":
+                print(f"Pipeline failed: {status.error}")
+                break
+
+            await anyio.sleep(0.3)
+
+
+async def run():
+    """Run the async progress client example."""
+    async with stdio_client(server_params) as (read, write):
+        async with ClientSession(read, write, protocol_version="next") as session:
+            await session.initialize()
+
+            await test_batch_processing(session)
+            await test_data_pipeline(session)
+
+            print("\nProgress notification examples complete!")
+
+
+def main():
+    anyio.run(run)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/examples/snippets/clients/async_sampling_client.py b/examples/snippets/clients/async_sampling_client.py
new file mode 100644
index 000000000..7cd1d1e13
--- /dev/null
+++ b/examples/snippets/clients/async_sampling_client.py
@@ -0,0 +1,127 @@
+"""
+Client example for async tools with sampling (LLM interaction).
+
+cd to the `examples/snippets` directory and run:
+    uv run async-sampling-client
+"""
+
+import os
+
+import anyio
+
+from mcp import ClientSession, StdioServerParameters, types
+from mcp.client.stdio import stdio_client
+from mcp.shared.context import RequestContext
+
+# Server parameters for async sampling example
+server_params = StdioServerParameters(
+    command="uv",
+    args=["run", "server", "async_tool_sampling", "stdio"],
+    env={"UV_INDEX": os.environ.get("UV_INDEX", "")},
+)
+
+
+async def sampling_callback(
+    context: RequestContext[ClientSession, None], params: types.CreateMessageRequestParams
+) -> types.CreateMessageResult:
+    """Handle sampling requests from the server."""
+    print("Server requesting LLM generation...")
+
+    # Simulate an LLM response based on the prompt
+    response = "Generic simulated LLM response."
+
+    if params.messages and len(params.messages) > 0:
+        message = params.messages[0]
+        if hasattr(message, "content") and isinstance(message.content, types.TextContent):
+            prompt_text = message.content.text
+
+            # Generate different responses based on prompt content
+            if "poem" in prompt_text.lower():
+                response = "Roses are red, violets are blue,\nThis is a simulated poem for you!"
+            elif "story" in prompt_text.lower():
+                response = "Once upon a time, in a digital realm, there lived a helpful AI assistant..."
+            elif "summary" in prompt_text.lower():
+                response = "This is a concise summary of the requested topic, generated by simulation."
+            elif "analysis" in prompt_text.lower():
+                response = (
+                    "Analysis: This topic demonstrates key concepts and relationships that are important to understand."
+                )
+            else:
+                response = "This is a simulated LLM response for testing purposes."
+
+    return types.CreateMessageResult(
+        role="assistant",
+        content=types.TextContent(type="text", text=response),
+        model="test-model",
+    )
+
+
+async def test_content_generation(session: ClientSession):
+    """Test content generation with LLM sampling."""
+    print("Testing content generation...")
+
+    result = await session.call_tool("generate_content", {"topic": "artificial intelligence", "content_type": "poem"})
+
+    if result.operation:
+        token = result.operation.token
+        print(f"Content generation started with token: {token}")
+
+        while True:
+            status = await session.get_operation_status(token)
+            if status.status == "completed":
+                final_result = await session.get_operation_result(token)
+                for content in final_result.result.content:
+                    if isinstance(content, types.TextContent):
+                        print(f"Generated content:\n{content.text}")
+                break
+            elif status.status == "failed":
+                print(f"Generation failed: {status.error}")
+                break
+
+            await anyio.sleep(0.3)
+
+
+async def test_multi_step_generation(session: ClientSession):
+    """Test multi-step content generation."""
+    print("\nTesting multi-step generation...")
+
+    steps = ["write a brief introduction", "explain the main concepts", "provide a conclusion"]
+
+    result = await session.call_tool("multi_step_generation", {"topic": "machine learning", "steps": steps})
+
+    if result.operation:
+        token = result.operation.token
+        print(f"Multi-step generation started with token: {token}")
+
+        while True:
+            status = await session.get_operation_status(token)
+            if status.status == "completed":
+                final_result = await session.get_operation_result(token)
+
+                if final_result.result.structuredContent:
+                    print("Generated content by step:")
+                    for step, content in final_result.result.structuredContent.items():
+                        print(f"\n{step}:")
+                        print(f"  {content}")
+                break
+            elif status.status == "failed":
+                print(f"Multi-step generation failed: {status.error}")
+                break
+
+            await anyio.sleep(0.3)
+
+
+async def run():
+    """Run the async sampling client example."""
+    async with stdio_client(server_params) as (read, write):
+        async with ClientSession(read, write, protocol_version="next", sampling_callback=sampling_callback) as session:
+            await session.initialize()
+
+            await test_content_generation(session)
+            await test_multi_step_generation(session)
+
+            print("\nSampling examples complete!")
+
+
+def main():
+    anyio.run(run)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/examples/snippets/clients/async_tool_client.py b/examples/snippets/clients/async_tool_client.py
new file mode 100644
index 000000000..e67a18733
--- /dev/null
+++ b/examples/snippets/clients/async_tool_client.py
@@ -0,0 +1,60 @@
+"""
+Client example for async tools.
+
+cd to the `examples/snippets` directory and run:
+    uv run async-tool-client
+"""
+
+import os
+
+import anyio
+
+from mcp import ClientSession, StdioServerParameters, types
+from mcp.client.stdio import stdio_client
+
+# Server parameters for async tool example
+server_params = StdioServerParameters(
+    command="uv",
+    args=["run", "server", "async_tool_basic", "stdio"],
+    env={"UV_INDEX": os.environ.get("UV_INDEX", "")},
+)
+
+
+async def call_async_tool(session: ClientSession):
+    """Demonstrate calling an async tool."""
+    print("Calling async tool...")
+
+    result = await session.call_tool("analyze_data", arguments={"dataset": "customer_data.csv"})
+
+    if result.operation:
+        token = result.operation.token
+        print(f"Operation started with token: {token}")
+
+        # Poll for completion
+        while True:
+            status = await session.get_operation_status(token)
+            print(f"Status: {status.status}")
+
+            if status.status == "completed":
+                final_result = await session.get_operation_result(token)
+                for content in final_result.result.content:
+                    if isinstance(content, types.TextContent):
+                        print(f"Result: {content.text}")
+                break
+            elif status.status == "failed":
+                print(f"Operation failed: {status.error}")
+                break
+
+            await anyio.sleep(0.5)
+
+
+async def run():
+    """Run the async tool client example."""
+    async with stdio_client(server_params) as (read, write):
+        async with ClientSession(read, write, protocol_version="next") as session:
+            await session.initialize()
+            await call_async_tool(session)
+
+
+def main():
+    anyio.run(run)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/examples/snippets/pyproject.toml b/examples/snippets/pyproject.toml
index 76791a55a..ea9c1658a 100644
--- a/examples/snippets/pyproject.toml
+++ b/examples/snippets/pyproject.toml
@@ -3,9 +3,7 @@ name = "mcp-snippets"
 version = "0.1.0"
 description = "MCP Example Snippets"
 requires-python = ">=3.10"
-dependencies = [
-    "mcp",
-]
+dependencies = ["mcp"]
 
 [build-system]
 requires = ["setuptools", "wheel"]
@@ -21,3 +19,4 @@ completion-client = "clients.completion_client:main"
 direct-execution-server = "servers.direct_execution:main"
 display-utilities-client = "clients.display_utilities:main"
 oauth-client = "clients.oauth_client:run"
+async-tool-client = "clients.async_tool_client:main"
+async-elicitation-client = "clients.async_elicitation_client:main"
+async-progress-client = "clients.async_progress_client:main"
+async-sampling-client = "clients.async_sampling_client:main"
diff --git a/examples/snippets/servers/__init__.py b/examples/snippets/servers/__init__.py
index b9865e822..a5aefd538 100644
--- a/examples/snippets/servers/__init__.py
+++ b/examples/snippets/servers/__init__.py
@@ -22,7 +22,8 @@ def run_server():
         print("Usage: server [transport]")
         print("Available servers: basic_tool, basic_resource, basic_prompt, tool_progress,")
         print("                   sampling, elicitation, completion, notifications,")
-        print("                   fastmcp_quickstart, structured_output, images")
+        print("                   fastmcp_quickstart, structured_output, images,")
+        print("                   async_tool_basic, async_tool_immediate, async_tool_progress,")
+        print("                   async_tool_elicitation, async_tool_sampling")
         print("Available transports: stdio (default), sse, streamable-http")
         sys.exit(1)
diff --git a/examples/snippets/servers/async_tool_basic.py b/examples/snippets/servers/async_tool_basic.py
new file mode 100644
index 000000000..2ff3c4168
--- /dev/null
+++ b/examples/snippets/servers/async_tool_basic.py
@@ -0,0 +1,52 @@
+"""
+Basic async tool example.
+
+cd to the `examples/snippets/clients` directory and run:
+    uv run server async_tool_basic stdio
+"""
+
+import anyio
+
+from mcp.server.fastmcp import Context, FastMCP
+from mcp.server.session import ServerSession
+
+mcp = FastMCP("Async Tool Basic")
+
+
+@mcp.tool(invocation_modes=["async"])
+async def analyze_data(dataset: str, ctx: Context[ServerSession, None]) -> str:
+    """Analyze a dataset asynchronously with progress updates."""
+    await ctx.info(f"Starting analysis of {dataset}")
+
+    # Simulate analysis with progress updates
+    for i in range(5):
+        await anyio.sleep(0.5)
+        progress = (i + 1) / 5
+        await ctx.report_progress(progress, 1.0, f"Processing step {i + 1}/5")
+
+    await ctx.info("Analysis complete")
+    return f"Analysis results for {dataset}: 95% accuracy achieved"
+
+
+@mcp.tool(invocation_modes=["sync", "async"])
+async def process_text(text: str, ctx: Context[ServerSession, None]) -> str:
+    """Process text in sync or async mode."""
+
+    await ctx.info(f"Processing text asynchronously: {text[:20]}...")
+    await anyio.sleep(0.3)
+
+    return f"Processed: {text.upper()}"
+
+
+@mcp.tool()
+async def process_text_sync(text: str, ctx: Context[ServerSession, None]) -> str:
+    """Process text in sync mode only."""
+
+    await ctx.info(f"Processing text: {text[:20]}...")
+    await anyio.sleep(0.3)
+
+    return f"Processed: {text.upper()}"
+
+
+if __name__ == "__main__":
+    mcp.run()
diff --git a/examples/snippets/servers/async_tool_elicitation.py b/examples/snippets/servers/async_tool_elicitation.py
new file mode 100644
index 000000000..300fb3d27
--- /dev/null
+++ b/examples/snippets/servers/async_tool_elicitation.py
@@ -0,0 +1,103 @@
+"""
+Async tool with elicitation example.
+
+cd to the `examples/snippets/clients` directory and run:
+    uv run server async_tool_elicitation stdio
+"""
+
+import anyio
+from pydantic import BaseModel, Field
+
+from mcp.server.fastmcp import Context, FastMCP
+from mcp.server.session import ServerSession
+
+mcp = FastMCP("Async Tool Elicitation")
+
+
+class UserPreferences(BaseModel):
+    """Schema for collecting user preferences."""
+
+    continue_processing: bool = Field(description="Should we continue with the operation?")
+    priority_level: str = Field(
+        default="normal",
+        description="Priority level: low, normal, high",
+    )
+
+
+class FileOperationChoice(BaseModel):
+    """Schema for file operation confirmation."""
+
+    confirm_operation: bool = Field(description="Confirm the file operation?")
+    backup_first: bool = Field(default=True, description="Create backup before operation?")
+
+
+@mcp.tool(invocation_modes=["async"])
+async def process_with_confirmation(operation: str, ctx: Context[ServerSession, None]) -> str:
+    """Process an operation that requires user confirmation."""
+    await ctx.info(f"Starting operation: {operation}")
+
+    # Simulate some initial processing
+    await anyio.sleep(0.5)
+    await ctx.report_progress(0.3, 1.0, "Initial processing complete")
+
+    # Ask user for preferences
+    result = await ctx.elicit(
How should we proceed?", + schema=UserPreferences, + ) + + if result.action == "accept" and result.data: + if result.data.continue_processing: + await ctx.info(f"Continuing with {result.data.priority_level} priority") + # Simulate processing based on user choice + processing_time = {"low": 0.5, "normal": 1.0, "high": 1.5}.get(result.data.priority_level, 1.0) + await anyio.sleep(processing_time) + await ctx.report_progress(1.0, 1.0, "Operation complete") + return f"Operation '{operation}' completed successfully with {result.data.priority_level} priority" + else: + await ctx.warning("User chose not to continue") + return f"Operation '{operation}' cancelled by user" + else: + await ctx.error("User declined or cancelled the operation") + return f"Operation '{operation}' aborted" + + +@mcp.tool(invocation_modes=["async"]) +async def file_operation(file_path: str, operation_type: str, ctx: Context[ServerSession, None]) -> str: + """Perform file operation with user confirmation.""" + await ctx.info(f"Analyzing file: {file_path}") + + # Simulate initial analysis + await anyio.sleep(1) + await ctx.report_progress(0.3, 1.0, "File analysis complete") + + # Simulate finding something that requires user confirmation + await ctx.warning(f"About to perform {operation_type} on {file_path} - requires confirmation") + + # Ask user for confirmation + result = await ctx.elicit( + message=f"Confirm {operation_type} operation on {file_path}?", + schema=FileOperationChoice, + ) + + if result.action == "accept" and result.data: + if result.data.confirm_operation: + if result.data.backup_first: + await ctx.info("Creating backup first...") + await anyio.sleep(0.5) + await ctx.report_progress(0.7, 1.0, "Backup created") + + await ctx.info(f"Performing {operation_type} operation...") + await anyio.sleep(1) + await ctx.report_progress(1.0, 1.0, "Operation complete") + + backup_msg = " (with backup)" if result.data.backup_first else " (no backup)" + return f"Successfully performed {operation_type} on {file_path}{backup_msg}" + else: + return f"Operation {operation_type} on {file_path} cancelled by user" + else: + return f"Operation {operation_type} on {file_path} declined" + + +if __name__ == "__main__": + mcp.run() diff --git a/examples/snippets/servers/async_tool_immediate.py b/examples/snippets/servers/async_tool_immediate.py new file mode 100644 index 000000000..0e34d9d0c --- /dev/null +++ b/examples/snippets/servers/async_tool_immediate.py @@ -0,0 +1,37 @@ +""" +Async tool with immediate result example. + +cd to the `examples/snippets/clients` directory and run: + uv run server async_tool_immediate stdio +""" + +import anyio + +from mcp import types +from mcp.server.fastmcp import Context, FastMCP +from mcp.server.session import ServerSession + +mcp = FastMCP("Async Tool Immediate") + + +async def provide_immediate_feedback(operation: str) -> list[types.ContentBlock]: + """Provide immediate feedback while async operation starts.""" + return [types.TextContent(type="text", text=f"Starting {operation} operation. 
This will take a moment.")] + + +@mcp.tool(invocation_modes=["async"], immediate_result=provide_immediate_feedback) +async def long_analysis(operation: str, ctx: Context[ServerSession, None]) -> str: + """Perform long-running analysis with immediate user feedback.""" + await ctx.info(f"Beginning {operation} analysis") + + # Simulate long-running work + for i in range(4): + await anyio.sleep(1) + progress = (i + 1) / 4 + await ctx.report_progress(progress, 1.0, f"Analysis step {i + 1}/4") + + return f"Analysis '{operation}' completed with detailed results" + + +if __name__ == "__main__": + mcp.run() diff --git a/examples/snippets/servers/async_tool_progress.py b/examples/snippets/servers/async_tool_progress.py new file mode 100644 index 000000000..ed98a5858 --- /dev/null +++ b/examples/snippets/servers/async_tool_progress.py @@ -0,0 +1,71 @@ +""" +Async tool with progress notifications example. + +cd to the `examples/snippets/clients` directory and run: + uv run server async_tool_progress stdio +""" + +import anyio + +from mcp.server.fastmcp import Context, FastMCP +from mcp.server.session import ServerSession + +mcp = FastMCP("Async Tool Progress") + + +@mcp.tool(invocation_modes=["async"]) +async def batch_process(items: list[str], ctx: Context[ServerSession, None]) -> list[str]: + """Process a batch of items with detailed progress reporting.""" + await ctx.info(f"Starting batch processing of {len(items)} items") + + results: list[str] = [] + + for i, item in enumerate(items): + await ctx.debug(f"Processing item {i + 1}: {item}") + + # Simulate variable processing time + processing_time = 0.3 + (len(item) * 0.1) + await anyio.sleep(processing_time) + + # Report progress for this item + progress = (i + 1) / len(items) + await ctx.report_progress(progress, 1.0, f"Processed {i + 1}/{len(items)}: {item}") + + # Process the item + result = f"PROCESSED_{item.upper()}" + results.append(result) + + await ctx.debug(f"Item {i + 1} result: {result}") + + await ctx.info(f"Batch processing complete! Processed {len(results)} items") + return results + + +@mcp.tool(invocation_modes=["async"]) +async def data_pipeline(dataset: str, operations: list[str], ctx: Context[ServerSession, None]) -> dict[str, str]: + """Execute a data processing pipeline with progress updates.""" + await ctx.info(f"Starting data pipeline for {dataset}") + + results: dict[str, str] = {} + total_ops = len(operations) + + for i, operation in enumerate(operations): + await ctx.debug(f"Executing operation: {operation}") + + # Simulate processing time that increases with complexity + processing_time = 0.5 + (i * 0.2) + await anyio.sleep(processing_time) + + # Report progress + progress = (i + 1) / total_ops + await ctx.report_progress(progress, 1.0, f"Completed {operation}") + + # Store result + results[operation] = f"Result of {operation} on {dataset}" + + await ctx.info("Data pipeline complete!") + return results + + +if __name__ == "__main__": + mcp.run() diff --git a/examples/snippets/servers/async_tool_sampling.py b/examples/snippets/servers/async_tool_sampling.py new file mode 100644 index 000000000..d63273f50 --- /dev/null +++ b/examples/snippets/servers/async_tool_sampling.py @@ -0,0 +1,106 @@ +""" +Async tool with sampling (LLM interaction) example. 
+ +cd to the `examples/snippets/clients` directory and run: + uv run server async_tool_sampling stdio +""" + +import anyio + +from mcp.server.fastmcp import Context, FastMCP +from mcp.server.session import ServerSession +from mcp.types import SamplingMessage, TextContent + +mcp = FastMCP("Async Tool Sampling") + + +@mcp.tool(invocation_modes=["async"]) +async def generate_content(topic: str, content_type: str, ctx: Context[ServerSession, None]) -> str: + """Generate content using LLM sampling with progress updates.""" + await ctx.info(f"Starting {content_type} generation for topic: {topic}") + + # Simulate preparation + await anyio.sleep(0.5) + await ctx.report_progress(0.2, 1.0, "Preparing content generation") + + # Create prompt based on content type + prompts = { + "poem": f"Write a creative poem about {topic}", + "story": f"Write a short story about {topic}", + "summary": f"Write a concise summary about {topic}", + "analysis": f"Provide a detailed analysis of {topic}", + } + + prompt = prompts.get(content_type, f"Write about {topic}") + await ctx.report_progress(0.4, 1.0, "Prompt prepared") + + # Use LLM sampling + await ctx.info("Requesting content from LLM...") + result = await ctx.session.create_message( + messages=[ + SamplingMessage( + role="user", + content=TextContent(type="text", text=prompt), + ) + ], + max_tokens=200, + ) + + await ctx.report_progress(0.8, 1.0, "Content generated") + + # Process the result + await anyio.sleep(0.3) + await ctx.report_progress(1.0, 1.0, "Processing complete") + + if result.content.type == "text": + await ctx.info(f"Successfully generated {content_type}") + return f"Generated {content_type} about '{topic}':\n\n{result.content.text}" + else: + await ctx.warning("Unexpected content type from LLM") + return f"Generated {content_type} about '{topic}': {str(result.content)}" + + +@mcp.tool(invocation_modes=["async"]) +async def multi_step_generation(topic: str, steps: list[str], ctx: Context[ServerSession, None]) -> dict[str, str]: + """Generate multiple pieces of content in sequence.""" + await ctx.info(f"Starting multi-step generation for: {topic}") + + results: dict[str, str] = {} + total_steps = len(steps) + + for i, step in enumerate(steps): + await ctx.debug(f"Processing step {i + 1}: {step}") + + # Create step-specific prompt + prompt = f"For the topic '{topic}', please {step}" + + # Use LLM sampling for this step + result = await ctx.session.create_message( + messages=[ + SamplingMessage( + role="user", + content=TextContent(type="text", text=prompt), + ) + ], + max_tokens=150, + ) + + # Store result + if result.content.type == "text": + results[step] = result.content.text + else: + results[step] = str(result.content) + + # Report progress + progress = (i + 1) / total_steps + await ctx.report_progress(progress, 1.0, f"Completed step {i + 1}/{total_steps}: {step}") + + # Small delay between steps + await anyio.sleep(0.2) + + await ctx.info(f"Multi-step generation complete! 
Generated {len(results)} pieces of content") + return results + + +if __name__ == "__main__": + mcp.run() diff --git a/pyproject.toml b/pyproject.toml index 5af7ff4d8..b8bdb95db 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -128,7 +128,7 @@ mccabe.max-complexity = 24 # Default is 10 [tool.ruff.lint.pylint] allow-magic-value-types = ["bytes", "float", "int", "str"] -max-args = 23 # Default is 5 +max-args = 24 # Default is 5 max-branches = 23 # Default is 12 max-returns = 13 # Default is 6 max-statements = 102 # Default is 50 diff --git a/src/mcp/client/session.py b/src/mcp/client/session.py index bcf80d62a..dfb0eb508 100644 --- a/src/mcp/client/session.py +++ b/src/mcp/client/session.py @@ -2,12 +2,15 @@ from datetime import timedelta from typing import Any, Protocol +import anyio import anyio.lowlevel from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream from jsonschema import SchemaError, ValidationError, validate from pydantic import AnyUrl, TypeAdapter +from typing_extensions import Self import mcp.types as types +from mcp.shared.async_operations import ClientAsyncOperationManager from mcp.shared.context import RequestContext from mcp.shared.message import SessionMessage from mcp.shared.session import BaseSession, ProgressFnT, RequestResponder @@ -118,6 +121,7 @@ def __init__( logging_callback: LoggingFnT | None = None, message_handler: MessageHandlerFnT | None = None, client_info: types.Implementation | None = None, + protocol_version: str | None = None, ) -> None: super().__init__( read_stream, @@ -127,12 +131,20 @@ def __init__( read_timeout_seconds=read_timeout_seconds, ) self._client_info = client_info or DEFAULT_CLIENT_INFO + self._protocol_version = protocol_version or types.LATEST_PROTOCOL_VERSION self._sampling_callback = sampling_callback or _default_sampling_callback self._elicitation_callback = elicitation_callback or _default_elicitation_callback self._list_roots_callback = list_roots_callback or _default_list_roots_callback self._logging_callback = logging_callback or _default_logging_callback self._message_handler = message_handler or _default_message_handler self._tool_output_schemas: dict[str, dict[str, Any] | None] = {} + self._operation_manager = ClientAsyncOperationManager() + + async def __aenter__(self) -> Self: + await super().__aenter__() + self._task_group.start_soon(self._operation_manager.cleanup_loop) + self._exit_stack.push_async_callback(lambda: self._operation_manager.stop_cleanup_loop()) + return self async def initialize(self) -> types.InitializeResult: sampling = types.SamplingCapability() if self._sampling_callback is not _default_sampling_callback else None @@ -152,7 +164,7 @@ async def initialize(self) -> types.InitializeResult: types.ClientRequest( types.InitializeRequest( params=types.InitializeRequestParams( - protocolVersion=types.LATEST_PROTOCOL_VERSION, + protocolVersion=self._protocol_version, capabilities=types.ClientCapabilities( sampling=sampling, elicitation=elicitation, @@ -273,8 +285,18 @@ async def call_tool( arguments: dict[str, Any] | None = None, read_timeout_seconds: timedelta | None = None, progress_callback: ProgressFnT | None = None, + *, + async_properties: types.AsyncRequestProperties | None = None, ) -> types.CallToolResult: - """Send a tools/call request with optional progress callback support.""" + """Send a tools/call request with optional progress callback support. 
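+
+        If the server executes the tool asynchronously (protocol version
+        "next"), the result carries result.operation with a token rather
+        than final content; poll get_operation_status(token) and fetch the
+        payload via get_operation_result(token). A rough usage sketch
+        ("my_tool" and its arguments are placeholders):
+
+            result = await session.call_tool("my_tool", {"x": 1})
+            if result.operation is not None:
+                token = result.operation.token
+                while (await session.get_operation_status(token)).status not in ("completed", "failed"):
+                    await anyio.sleep(0.5)
+                final = await session.get_operation_result(token)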
+ + Args: + name: Name of the tool to call + arguments: Arguments to pass to the tool + read_timeout_seconds: Read timeout for the request + progress_callback: Optional progress callback + async_properties: Optional async parameters for async tool execution + """ result = await self.send_request( types.ClientRequest( @@ -282,6 +304,7 @@ async def call_tool( params=types.CallToolRequestParams( name=name, arguments=arguments, + operation_params=async_properties, ), ) ), @@ -291,11 +314,65 @@ async def call_tool( ) if not result.isError: - await self._validate_tool_result(name, result) + # Track operation for async operations + if result.operation is not None: + self._operation_manager.track_operation( + result.operation.token, name, result.operation.keepAlive or 3600 + ) + logger.debug(f"Tracking operation for token: {result.operation.token}") + else: + await self._validate_tool_result(name, result) + + return result + + async def get_operation_status(self, token: str) -> types.GetOperationStatusResult: + """Check the status of an async tool operation. + + Args: + token: Token returned from async call_tool + + Returns: + Status result with current operation state + """ + return await self.send_request( + types.ClientRequest( + types.GetOperationStatusRequest( + params=types.GetOperationStatusParams(token=token), + ) + ), + types.GetOperationStatusResult, + ) + + async def get_operation_result(self, token: str) -> types.GetOperationPayloadResult: + """Get the result of a completed async tool operation. + + Args: + token: Token returned from async call_tool + + Returns: + The final tool result + """ + result = await self.send_request( + types.ClientRequest( + types.GetOperationPayloadRequest( + params=types.GetOperationPayloadParams(token=token), + ) + ), + types.GetOperationPayloadResult, + ) + + # Validate using the stored tool name + if hasattr(result, "result") and result.result: + # Clean up expired operations first + self._operation_manager.cleanup_expired() + + tool_name = self._operation_manager.get_tool_name(token) + await self._validate_tool_result(tool_name, result.result) + # Keep the operation for potential future retrievals return result - async def _validate_tool_result(self, name: str, result: types.CallToolResult) -> None: + async def _validate_tool_result(self, name: str | None, result: types.CallToolResult) -> None: """Validate the structured content of a tool result against its output schema.""" if name not in self._tool_output_schemas: # refresh output schema cache @@ -308,6 +385,7 @@ async def _validate_tool_result(self, name: str, result: types.CallToolResult) - logger.warning(f"Tool {name} not listed by server, cannot validate any structured content") if output_schema is not None: + logger.debug(f"Validating structured content for tool: {name}") if result.structuredContent is None: raise RuntimeError(f"Tool {name} has an output schema but did not return structured content") try: @@ -388,8 +466,10 @@ async def send_roots_list_changed(self) -> None: async def _received_request(self, responder: RequestResponder[types.ServerRequest, types.ClientResult]) -> None: ctx = RequestContext[ClientSession, Any]( request_id=responder.request_id, + operation_token=responder.operation.token if responder.operation is not None else None, meta=responder.request_meta, session=self, + supports_async=False, # No client tools right now lifespan_context=None, ) @@ -397,12 +477,36 @@ async def _received_request(self, responder: RequestResponder[types.ServerReques case 
types.CreateMessageRequest(params=params): with responder: response = await self._sampling_callback(ctx, params) + if isinstance(response, types.CreateMessageResult): + response.operation_props = ( + types.Operation(token=responder.operation.token) + if responder.operation is not None + else None + ) + else: + response.operation = ( + types.Operation(token=responder.operation.token) + if responder.operation is not None + else None + ) client_response = ClientResponse.validate_python(response) await responder.respond(client_response) case types.ElicitRequest(params=params): with responder: response = await self._elicitation_callback(ctx, params) + if isinstance(response, types.ElicitResult): + response.operation_props = ( + types.Operation(token=responder.operation.token) + if responder.operation is not None + else None + ) + else: + response.operation = ( + types.Operation(token=responder.operation.token) + if responder.operation is not None + else None + ) client_response = ClientResponse.validate_python(response) await responder.respond(client_response) diff --git a/src/mcp/server/elicitation.py b/src/mcp/server/elicitation.py index 39e3212e9..5dec1767d 100644 --- a/src/mcp/server/elicitation.py +++ b/src/mcp/server/elicitation.py @@ -78,6 +78,7 @@ async def elicit_with_validation( message: str, schema: type[ElicitSchemaModelT], related_request_id: RequestId | None = None, + related_operation_token: str | None = None, ) -> ElicitationResult[ElicitSchemaModelT]: """Elicit information from the client/user with schema validation. @@ -96,6 +97,7 @@ async def elicit_with_validation( message=message, requestedSchema=json_schema, related_request_id=related_request_id, + related_operation_token=related_operation_token, ) if result.action == "accept" and result.content is not None: diff --git a/src/mcp/server/fastmcp/server.py b/src/mcp/server/fastmcp/server.py index 485ef1519..38dda76ea 100644 --- a/src/mcp/server/fastmcp/server.py +++ b/src/mcp/server/fastmcp/server.py @@ -2,6 +2,7 @@ from __future__ import annotations as _annotations +import contextlib import inspect import re from collections.abc import AsyncIterator, Awaitable, Callable, Collection, Iterable, Sequence @@ -21,6 +22,7 @@ from starlette.routing import Mount, Route from starlette.types import Receive, Scope, Send +import mcp.types as types from mcp.server.auth.middleware.auth_context import AuthContextMiddleware from mcp.server.auth.middleware.bearer_auth import BearerAuthBackend, RequireAuthMiddleware from mcp.server.auth.provider import OAuthAuthorizationServerProvider, ProviderTokenVerifier, TokenVerifier @@ -30,6 +32,7 @@ from mcp.server.fastmcp.prompts import Prompt, PromptManager from mcp.server.fastmcp.resources import FunctionResource, Resource, ResourceManager from mcp.server.fastmcp.tools import Tool, ToolManager +from mcp.server.fastmcp.tools.base import InvocationMode from mcp.server.fastmcp.utilities.context_injection import find_context_parameter from mcp.server.fastmcp.utilities.logging import configure_logging, get_logger from mcp.server.lowlevel.helper_types import ReadResourceContents @@ -42,8 +45,17 @@ from mcp.server.streamable_http import EventStore from mcp.server.streamable_http_manager import StreamableHTTPSessionManager from mcp.server.transport_security import TransportSecuritySettings +from mcp.shared.async_operations import ServerAsyncOperationManager from mcp.shared.context import LifespanContextT, RequestContext, RequestT -from mcp.types import AnyFunction, ContentBlock, GetPromptResult, Icon, 
ToolAnnotations +from mcp.types import ( + AnyFunction, + ContentBlock, + GetOperationPayloadResult, + GetOperationStatusResult, + GetPromptResult, + Icon, + ToolAnnotations, +) from mcp.types import Prompt as MCPPrompt from mcp.types import PromptArgument as MCPPromptArgument from mcp.types import Resource as MCPResource @@ -120,6 +132,8 @@ async def wrap(_: MCPServer[LifespanResultT, Request]) -> AsyncIterator[Lifespan class FastMCP(Generic[LifespanResultT]): + _tool_manager: ToolManager + def __init__( # noqa: PLR0913 self, name: str | None = None, @@ -130,6 +144,7 @@ def __init__( # noqa: PLR0913 token_verifier: TokenVerifier | None = None, event_store: EventStore | None = None, *, + async_operations: ServerAsyncOperationManager | None = None, tools: list[Tool] | None = None, debug: bool = False, log_level: Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] = "INFO", @@ -169,11 +184,14 @@ def __init__( # noqa: PLR0913 transport_security=transport_security, ) + self._async_operations = async_operations or ServerAsyncOperationManager() + self._mcp_server = MCPServer( name=name or "FastMCP", instructions=instructions, website_url=website_url, icons=icons, + async_operations=self._async_operations, # TODO(Marcelo): It seems there's a type mismatch between the lifespan type from an FastMCP and Server. # We need to create a Lifespan type that is a generic on the server type, like Starlette does. lifespan=(lifespan_wrapper(self, self.settings.lifespan) if self.settings.lifespan else default_lifespan), # type: ignore @@ -278,9 +296,84 @@ def _setup_handlers(self) -> None: self._mcp_server.get_prompt()(self.get_prompt) self._mcp_server.list_resource_templates()(self.list_resource_templates) + # Register async operation handlers + logger.info(f"Async operations manager: {self._async_operations}") + logger.info("Registering async operation handlers") + self._mcp_server.get_operation_status()(self.get_operation_status) + self._mcp_server.get_operation_result()(self.get_operation_result) + + async def get_operation_status(self, token: str) -> GetOperationStatusResult: + """Get the status of an async operation.""" + try: + operation = await self._async_operations.get_operation(token) + if not operation: + raise ValueError(f"Operation not found: {token}") + + return GetOperationStatusResult( + status=operation.status, + error=operation.error if operation.status == "failed" else None, + ) + except Exception: + logger.exception(f"Error getting operation status for token {token}") + raise + + async def get_operation_result(self, token: str) -> GetOperationPayloadResult: + """Get the result of a completed async operation.""" + try: + operation = await self._async_operations.get_operation(token) + if not operation: + raise ValueError(f"Operation not found: {token}") + + if operation.status != "completed": + raise ValueError(f"Operation not completed: {operation.status}") + + if not operation.result: + raise ValueError("Operation completed but no result available") + + return GetOperationPayloadResult(result=operation.result) + except Exception: + logger.exception(f"Error getting operation result for token {token}") + raise + + def _client_supports_async(self) -> bool: + """Check if the current client supports async tools.""" + try: + context = self.get_context() + return context.supports_async + except ValueError: + # Context not available (outside of request), assume no async support + pass + return False + + def _get_invocation_mode(self, info: Tool, client_supports_async: bool) -> Literal["sync", 
"async"] | None: + """Determine invocationMode field based on client support.""" + if not client_supports_async: + return None # Old clients don't see invocationMode field + + # New clients see the invocationMode field + modes = info.invocation_modes + if self._is_async_capable(modes): + return "async" # Hybrid or explicit async + if self._is_sync_only(modes): + return "sync" + return None + + def _is_async_capable(self, modes: list[InvocationMode]) -> bool: + """Return True if invocation_modes is async-only.""" + return "async" in modes + + def _is_sync_only(self, modes: list[InvocationMode]) -> bool: + """Return True if invocation_modes is sync-only.""" + return modes == ["sync"] + async def list_tools(self) -> list[MCPTool]: """List all available tools.""" tools = self._tool_manager.list_tools() + + # Check if client supports async tools based on protocol version + client_supports_async = self._client_supports_async() + + # Filter out async-only tools for old clients and set invocationMode based on client support return [ MCPTool( name=info.name, @@ -290,8 +383,15 @@ async def list_tools(self) -> list[MCPTool]: outputSchema=info.output_schema, annotations=info.annotations, icons=info.icons, + invocationMode=self._get_invocation_mode(info, client_supports_async), + _meta=info.meta, + internal=types.InternalToolProperties( + immediate_result=info.immediate_result, + keepalive=info.meta.get("_keep_alive") if info.meta else None, + ), ) for info in tools + if client_supports_async or info.invocation_modes != ["async"] ] def get_context(self) -> Context[ServerSession, LifespanResultT, Request]: @@ -364,6 +464,9 @@ def add_tool( annotations: ToolAnnotations | None = None, icons: list[Icon] | None = None, structured_output: bool | None = None, + invocation_modes: list[InvocationMode] | None = None, + keep_alive: int | None = None, + immediate_result: Callable[..., Awaitable[list[ContentBlock]]] | None = None, ) -> None: """Add a tool to the server. @@ -380,6 +483,12 @@ def add_tool( - If None, auto-detects based on the function's return type annotation - If True, creates a structured tool (return type annotation permitting) - If False, unconditionally creates an unstructured tool + invocation_modes: List of supported invocation modes (e.g., ["sync", "async"]) + - If None, defaults to ["sync"] for backwards compatibility + keep_alive: How long (in seconds) async operation results should be kept available. + Only applies to async tools. + immediate_result: Optional async function that returns immediate feedback content + for async tools. Must return list[ContentBlock]. Only valid for async-compatible tools. """ self._tool_manager.add_tool( fn, @@ -389,6 +498,9 @@ def add_tool( annotations=annotations, icons=icons, structured_output=structured_output, + invocation_modes=invocation_modes, + keep_alive=keep_alive, + immediate_result=immediate_result, ) def remove_tool(self, name: str) -> None: @@ -410,6 +522,9 @@ def tool( annotations: ToolAnnotations | None = None, icons: list[Icon] | None = None, structured_output: bool | None = None, + invocation_modes: list[InvocationMode] | None = None, + keep_alive: int | None = None, + immediate_result: Callable[..., Awaitable[list[ContentBlock]]] | None = None, ) -> Callable[[AnyFunction], AnyFunction]: """Decorator to register a tool. 
@@ -426,6 +541,14 @@ def tool( - If None, auto-detects based on the function's return type annotation - If True, creates a structured tool (return type annotation permitting) - If False, unconditionally creates an unstructured tool + invocation_modes: List of supported invocation modes (e.g., ["sync", "async"]) + - If None, defaults to ["sync"] for backwards compatibility + - Supports "sync" for synchronous execution and "async" for asynchronous execution + - Tools with "async" mode will be hidden from clients that don't support async execution + keep_alive: How long (in seconds) async operation results should be kept available. + Only applies to async tools. + immediate_result: Optional async function that returns immediate feedback content + for async tools. Must return list[ContentBlock]. Only valid for async-compatible tools. Example: @server.tool() @@ -441,6 +564,26 @@ def tool_with_context(x: int, ctx: Context) -> str: async def async_tool(x: int, context: Context) -> str: await context.report_progress(50, 100) return str(x) + + @server.tool(invocation_modes=["async"]) + async def async_only_tool(data: str, ctx: Context) -> str: + # This tool only supports async execution + await ctx.info("Starting long-running analysis...") + return await analyze_data(data) + + @server.tool(invocation_modes=["sync", "async"]) + def hybrid_tool(x: int) -> str: + # This tool supports both sync and async execution + return str(x) + + async def immediate_feedback(operation: str) -> list[ContentBlock]: + return [TextContent(type="text", text=f"Starting {operation}...")] + + @server.tool(invocation_modes=["async"], immediate_result=immediate_feedback) + async def long_running_tool(operation: str, ctx: Context) -> str: + # This tool provides immediate feedback while running asynchronously + await ctx.info(f"Processing {operation}") + return f"Completed {operation}" """ # Check if user passed function directly instead of calling decorator if callable(name): @@ -457,6 +600,9 @@ def decorator(fn: AnyFunction) -> AnyFunction: annotations=annotations, icons=icons, structured_output=structured_output, + invocation_modes=invocation_modes, + keep_alive=keep_alive, + immediate_result=immediate_result, ) return fn @@ -696,14 +842,21 @@ def decorator( return decorator + @contextlib.asynccontextmanager + async def _stdio_lifespan(self) -> AsyncIterator[None]: + """Lifespan that manages stdio operations.""" + async with self._async_operations.run(): + yield + async def run_stdio_async(self) -> None: """Run the server using stdio transport.""" async with stdio_server() as (read_stream, write_stream): - await self._mcp_server.run( - read_stream, - write_stream, - self._mcp_server.create_initialization_options(), - ) + async with self._stdio_lifespan(): + await self._mcp_server.run( + read_stream, + write_stream, + self._mcp_server.create_initialization_options(), + ) async def run_sse_async(self, mount_path: str | None = None) -> None: """Run the server using SSE transport.""" @@ -761,6 +914,12 @@ def _normalize_path(self, mount_path: str, endpoint: str) -> str: # Combine paths return mount_path + endpoint + @contextlib.asynccontextmanager + async def _sse_lifespan(self) -> AsyncIterator[None]: + """Lifespan that manages SSE operations.""" + async with self._async_operations.run(): + yield + def sse_app(self, mount_path: str | None = None) -> Starlette: """Return an instance of the SSE server app.""" from starlette.middleware import Middleware @@ -891,7 +1050,16 @@ async def sse_endpoint(request: Request) -> Response: 
        routes.extend(self._custom_starlette_routes)

        # Create Starlette app with routes and middleware
-        return Starlette(debug=self.settings.debug, routes=routes, middleware=middleware)
+        return Starlette(
+            debug=self.settings.debug, routes=routes, middleware=middleware, lifespan=lambda app: self._sse_lifespan()
+        )
+
+    @contextlib.asynccontextmanager
+    async def _streamable_http_lifespan(self) -> AsyncIterator[None]:
+        """Lifespan that manages Streamable HTTP operations."""
+        async with self.session_manager.run():
+            async with self._async_operations.run():
+                yield

     def streamable_http_app(self) -> Starlette:
         """Return an instance of the StreamableHTTP server app."""
@@ -986,7 +1154,7 @@
             debug=self.settings.debug,
             routes=routes,
             middleware=middleware,
-            lifespan=lambda app: self.session_manager.run(),
+            lifespan=lambda app: self._streamable_http_lifespan(),
         )

     async def list_prompts(self) -> list[MCPPrompt]:
@@ -1122,6 +1290,8 @@ async def report_progress(self, progress: float, total: float | None = None, mes
             progress=progress,
             total=total,
             message=message,
+            related_request_id=self.request_id,
+            related_operation_token=self.request_context.operation_token,
         )

     async def read_resource(self, uri: str | AnyUrl) -> Iterable[ReadResourceContents]:
@@ -1164,7 +1334,11 @@
         """
         return await elicit_with_validation(
-            session=self.request_context.session, message=message, schema=schema, related_request_id=self.request_id
+            session=self.request_context.session,
+            message=message,
+            schema=schema,
+            related_request_id=self.request_id,
+            related_operation_token=self.request_context.operation_token,
         )

     async def log(
@@ -1182,12 +1356,17 @@
             logger_name: Optional logger name
             **extra: Additional structured data to include
         """
-        await self.request_context.session.send_log_message(
-            level=level,
-            data=message,
-            logger=logger_name,
-            related_request_id=self.request_id,
-        )
+        try:
+            await self.request_context.session.send_log_message(
+                level=level,
+                data=message,
+                logger=logger_name,
+                related_request_id=self.request_id,
+            )
+        except Exception as e:
+            # Session might be closed (e.g., client disconnected)
+            logger.warning(f"Failed to send log message to client (session closed?): {e}")

     @property
     def client_id(self) -> str | None:
@@ -1204,6 +1383,11 @@ def session(self):
         """Access to the underlying session for advanced usage."""
         return self.request_context.session

+    @property
+    def supports_async(self) -> bool:
+        """Whether async tools are supported in the current context."""
+        return self.request_context.supports_async
+
     # Convenience methods for common log levels
     async def debug(self, message: str, **extra: Any) -> None:
         """Send a debug log message."""
diff --git a/src/mcp/server/fastmcp/tools/base.py b/src/mcp/server/fastmcp/tools/base.py
index 3f26ddcea..099755cbf 100644
--- a/src/mcp/server/fastmcp/tools/base.py
+++ b/src/mcp/server/fastmcp/tools/base.py
@@ -2,22 +2,24 @@
 import functools
 import inspect
-from collections.abc import Callable
+from collections.abc import Awaitable, Callable
 from functools import cached_property
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Literal

 from pydantic import BaseModel, Field

 from mcp.server.fastmcp.exceptions import ToolError
 from mcp.server.fastmcp.utilities.context_injection import find_context_parameter
 from mcp.server.fastmcp.utilities.func_metadata import FuncMetadata, func_metadata
-from mcp.types import Icon, ToolAnnotations
+from mcp.types import ContentBlock,
Icon, ToolAnnotations if TYPE_CHECKING: from mcp.server.fastmcp.server import Context from mcp.server.session import ServerSessionT from mcp.shared.context import LifespanContextT, RequestT +InvocationMode = Literal["sync", "async"] + class Tool(BaseModel): """Internal tool registration info.""" @@ -34,6 +36,13 @@ class Tool(BaseModel): context_kwarg: str | None = Field(None, description="Name of the kwarg that should receive context") annotations: ToolAnnotations | None = Field(None, description="Optional annotations for the tool") icons: list[Icon] | None = Field(default=None, description="Optional list of icons for this tool") + invocation_modes: list[InvocationMode] = Field( + default=["sync"], description="Supported invocation modes (sync/async)" + ) + immediate_result: Callable[..., Awaitable[list[ContentBlock]]] | None = Field( + None, exclude=True, description="Optional immediate result function for async tools" + ) + meta: dict[str, Any] | None = Field(description="Optional additional tool information.", default=None) @cached_property def output_schema(self) -> dict[str, Any] | None: @@ -50,6 +59,10 @@ def from_function( annotations: ToolAnnotations | None = None, icons: list[Icon] | None = None, structured_output: bool | None = None, + invocation_modes: list[InvocationMode] | None = None, + keep_alive: int | None = None, + immediate_result: Callable[..., Awaitable[list[Any]]] | None = None, + meta: dict[str, Any] | None = None, ) -> Tool: """Create a Tool from a function.""" func_name = name or fn.__name__ @@ -70,6 +83,43 @@ def from_function( ) parameters = func_arg_metadata.arg_model.model_json_schema(by_alias=True) + # Default to sync mode if no invocation modes specified + if invocation_modes is None: + invocation_modes = ["sync"] + + # Set appropriate default keep_alive based on async compatibility + # if user didn't specify custom keep_alive + if keep_alive is None and "async" in invocation_modes: + keep_alive = 3600 # Default for async-compatible tools + + # Validate keep_alive is only used with async-compatible tools + if keep_alive is not None and "async" not in invocation_modes: + raise ValueError( + f"keep_alive parameter can only be used with async-compatible tools. " + f"Tool '{func_name}' has invocation_modes={invocation_modes} " + f"but specifies keep_alive={keep_alive}. " + f"Add 'async' to invocation_modes to use keep_alive." + ) + + # Process meta dictionary and add keep_alive if specified + meta = meta or {} + if keep_alive is not None: + meta = meta.copy() # Don't modify the original dict + meta["_keep_alive"] = keep_alive + + # Validate immediate_result usage + if immediate_result is not None: + # Check if tool supports async invocation + if "async" not in invocation_modes: + raise ValueError( + "immediate_result can only be used with async-compatible tools. " + "Add 'async' to invocation_modes to use immediate_result." 
+ ) + + # Validate that immediate_result is an async callable + if not _is_async_callable(immediate_result): + raise ValueError("immediate_result must be an async callable that returns list[ContentBlock]") + return cls( fn=fn, name=func_name, @@ -81,6 +131,9 @@ def from_function( context_kwarg=context_kwarg, annotations=annotations, icons=icons, + invocation_modes=invocation_modes, + immediate_result=immediate_result, + meta=meta, ) async def run( diff --git a/src/mcp/server/fastmcp/tools/tool_manager.py b/src/mcp/server/fastmcp/tools/tool_manager.py index d6c0054af..5a13da02f 100644 --- a/src/mcp/server/fastmcp/tools/tool_manager.py +++ b/src/mcp/server/fastmcp/tools/tool_manager.py @@ -1,13 +1,13 @@ from __future__ import annotations as _annotations -from collections.abc import Callable +from collections.abc import Awaitable, Callable from typing import TYPE_CHECKING, Any from mcp.server.fastmcp.exceptions import ToolError -from mcp.server.fastmcp.tools.base import Tool +from mcp.server.fastmcp.tools.base import InvocationMode, Tool from mcp.server.fastmcp.utilities.logging import get_logger from mcp.shared.context import LifespanContextT, RequestT -from mcp.types import Icon, ToolAnnotations +from mcp.types import ContentBlock, Icon, ToolAnnotations if TYPE_CHECKING: from mcp.server.fastmcp.server import Context @@ -51,6 +51,10 @@ def add_tool( annotations: ToolAnnotations | None = None, icons: list[Icon] | None = None, structured_output: bool | None = None, + invocation_modes: list[InvocationMode] | None = None, + keep_alive: int | None = None, + immediate_result: Callable[..., Awaitable[list[ContentBlock]]] | None = None, + meta: dict[str, Any] | None = None, ) -> Tool: """Add a tool to the server.""" tool = Tool.from_function( @@ -61,6 +65,10 @@ def add_tool( annotations=annotations, icons=icons, structured_output=structured_output, + invocation_modes=invocation_modes, + keep_alive=keep_alive, + immediate_result=immediate_result, + meta=meta, ) existing = self._tools.get(tool.name) if existing: diff --git a/src/mcp/server/lowlevel/server.py b/src/mcp/server/lowlevel/server.py index 2fec3381b..d4011aeb4 100644 --- a/src/mcp/server/lowlevel/server.py +++ b/src/mcp/server/lowlevel/server.py @@ -77,6 +77,7 @@ async def main(): import anyio import jsonschema +from anyio.abc import TaskGroup from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream from pydantic import AnyUrl from typing_extensions import TypeVar @@ -86,10 +87,12 @@ async def main(): from mcp.server.lowlevel.helper_types import ReadResourceContents from mcp.server.models import InitializationOptions from mcp.server.session import ServerSession +from mcp.shared.async_operations import ServerAsyncOperation, ServerAsyncOperationManager from mcp.shared.context import RequestContext from mcp.shared.exceptions import McpError from mcp.shared.message import ServerMessageMetadata, SessionMessage from mcp.shared.session import RequestResponder +from mcp.types import NEXT_PROTOCOL_VERSION, Operation, RequestId logger = logging.getLogger(__name__) @@ -138,6 +141,7 @@ def __init__( instructions: str | None = None, website_url: str | None = None, icons: list[types.Icon] | None = None, + async_operations: ServerAsyncOperationManager | None = None, lifespan: Callable[ [Server[LifespanResultT, RequestT]], AbstractAsyncContextManager[LifespanResultT], @@ -149,10 +153,20 @@ def __init__( self.website_url = website_url self.icons = icons self.lifespan = lifespan + self.async_operations = async_operations or 
ServerAsyncOperationManager() + self.async_operations.set_handler(self._execute_tool_async) + # Track request ID to operation token mapping for cancellation + self._request_to_operation: dict[RequestId, str] = {} + # Store tool functions for async execution + self._tool_function: ( + Callable[..., Awaitable[UnstructuredContent | StructuredContent | CombinationContent]] | None + ) = None self.request_handlers: dict[type, Callable[..., Awaitable[types.ServerResult]]] = { types.PingRequest: _ping_handler, } - self.notification_handlers: dict[type, Callable[..., Awaitable[None]]] = {} + self.notification_handlers: dict[type, Callable[..., Awaitable[None]]] = { + types.CancelledNotification: self._handle_cancelled_notification, + } self._tool_cache: dict[str, types.Tool] = {} logger.debug("Initializing server %r", name) @@ -244,7 +258,7 @@ def decorator( wrapper = create_call_wrapper(func, types.ListPromptsRequest) - async def handler(req: types.ListPromptsRequest): + async def handler(req: types.ListPromptsRequest, _: Any = None): result = await wrapper(req) # Handle both old style (list[Prompt]) and new style (ListPromptsResult) if isinstance(result, types.ListPromptsResult): @@ -264,7 +278,7 @@ def decorator( ): logger.debug("Registering handler for GetPromptRequest") - async def handler(req: types.GetPromptRequest): + async def handler(req: types.GetPromptRequest, _: Any = None): prompt_get = await func(req.params.name, req.params.arguments) return types.ServerResult(prompt_get) @@ -282,7 +296,7 @@ def decorator( wrapper = create_call_wrapper(func, types.ListResourcesRequest) - async def handler(req: types.ListResourcesRequest): + async def handler(req: types.ListResourcesRequest, _: Any = None): result = await wrapper(req) # Handle both old style (list[Resource]) and new style (ListResourcesResult) if isinstance(result, types.ListResourcesResult): @@ -300,7 +314,7 @@ def list_resource_templates(self): def decorator(func: Callable[[], Awaitable[list[types.ResourceTemplate]]]): logger.debug("Registering handler for ListResourceTemplatesRequest") - async def handler(_: Any): + async def handler(_1: Any, _2: Any = None): templates = await func() return types.ServerResult(types.ListResourceTemplatesResult(resourceTemplates=templates)) @@ -315,7 +329,7 @@ def decorator( ): logger.debug("Registering handler for ReadResourceRequest") - async def handler(req: types.ReadResourceRequest): + async def handler(req: types.ReadResourceRequest, _: Any = None): result = await func(req.params.uri) def create_content(data: str | bytes, mime_type: str | None): @@ -371,7 +385,7 @@ def set_logging_level(self): def decorator(func: Callable[[types.LoggingLevel], Awaitable[None]]): logger.debug("Registering handler for SetLevelRequest") - async def handler(req: types.SetLevelRequest): + async def handler(req: types.SetLevelRequest, _: Any = None): await func(req.params.level) return types.ServerResult(types.EmptyResult()) @@ -384,7 +398,7 @@ def subscribe_resource(self): def decorator(func: Callable[[AnyUrl], Awaitable[None]]): logger.debug("Registering handler for SubscribeRequest") - async def handler(req: types.SubscribeRequest): + async def handler(req: types.SubscribeRequest, _: Any = None): await func(req.params.uri) return types.ServerResult(types.EmptyResult()) @@ -397,7 +411,7 @@ def unsubscribe_resource(self): def decorator(func: Callable[[AnyUrl], Awaitable[None]]): logger.debug("Registering handler for UnsubscribeRequest") - async def handler(req: types.UnsubscribeRequest): + async def handler(req: 
types.UnsubscribeRequest, _: Any = None): await func(req.params.uri) return types.ServerResult(types.EmptyResult()) @@ -415,7 +429,7 @@ def decorator( wrapper = create_call_wrapper(func, types.ListToolsRequest) - async def handler(req: types.ListToolsRequest): + async def handler(req: types.ListToolsRequest, _: Any = None): result = await wrapper(req) # Handle both old style (list[Tool]) and new style (ListToolsResult) @@ -485,7 +499,10 @@ def decorator( ): logger.debug("Registering handler for CallToolRequest") - async def handler(req: types.CallToolRequest): + # Store the tool function for async execution + self._tool_function = func + + async def handler(req: types.CallToolRequest, server_scope: TaskGroup): try: tool_name = req.params.name arguments = req.params.arguments or {} @@ -498,46 +515,75 @@ async def handler(req: types.CallToolRequest): except jsonschema.ValidationError as e: return self._make_error_result(f"Input validation error: {e.message}") - # tool call - results = await func(tool_name, arguments) + # Check for async execution + if tool and self.async_operations and self._should_execute_async(tool): + keep_alive = self._get_tool_keep_alive(tool) + immediate_content: list[types.ContentBlock] = [] - # output normalization - unstructured_content: UnstructuredContent - maybe_structured_content: StructuredContent | None - if isinstance(results, tuple) and len(results) == 2: - # tool returned both structured and unstructured content - unstructured_content, maybe_structured_content = cast(CombinationContent, results) - elif isinstance(results, dict): - # tool returned structured content only - maybe_structured_content = cast(StructuredContent, results) - unstructured_content = [types.TextContent(type="text", text=json.dumps(results, indent=2))] - elif hasattr(results, "__iter__"): - # tool returned unstructured content only - unstructured_content = cast(UnstructuredContent, results) - maybe_structured_content = None - else: - return self._make_error_result(f"Unexpected return type from tool: {type(results).__name__}") - - # output validation - if tool and tool.outputSchema is not None: - if maybe_structured_content is None: - return self._make_error_result( - "Output validation error: outputSchema defined but no structured output returned" - ) - else: + # Execute immediate result if available + if self._has_immediate_result(tool): try: - jsonschema.validate(instance=maybe_structured_content, schema=tool.outputSchema) - except jsonschema.ValidationError as e: - return self._make_error_result(f"Output validation error: {e.message}") - - # result - return types.ServerResult( - types.CallToolResult( - content=list(unstructured_content), - structuredContent=maybe_structured_content, - isError=False, + immediate_content = await self._execute_immediate_result(tool, arguments) + logger.debug(f"Executed immediate result for {tool_name}") + except McpError: + # Re-raise McpError as-is + raise + except Exception as e: + raise McpError( + types.ErrorData( + code=types.INTERNAL_ERROR, + message=f"Immediate result execution failed: {str(e)}", + ) + ) + + # Create async operation + operation = await self.async_operations.create_operation( + tool_name=tool_name, + arguments=arguments, + keep_alive=keep_alive, ) - ) + logger.debug(f"Created async operation with token: {operation.token}") + + # Add the operation token to the request context + ctx = RequestContext( + request_id=self.request_context.request_id, + operation_token=self.request_context.operation_token, + 
meta=self.request_context.meta, + session=self.request_context.session, + supports_async=self._client_supports_async(self.request_context.session), + lifespan_context=self.request_context.lifespan_context, + request=self.request_context.request, + ) + ctx.operation_token = operation.token + request_ctx.set(ctx) + + # Start task with tool name and arguments + current_request_context = request_ctx.get() + await self.async_operations.start_task( + operation.token, tool_name, arguments, current_request_context + ) + + # Return operation result with immediate content + logger.info(f"Returning async operation result for {tool_name}") + return types.ServerResult( + types.CallToolResult( + content=immediate_content, + operation=types.AsyncResultProperties( + token=operation.token, + keepAlive=operation.keep_alive, + ), + ) + ) + + # tool call + results = await func(tool_name, arguments) + + # Process results using shared logic + try: + result = self._process_tool_result(results, tool) + return types.ServerResult(result) + except ValueError as e: + return self._make_error_result(str(e)) except Exception as e: return self._make_error_result(str(e)) @@ -546,13 +592,141 @@ async def handler(req: types.CallToolRequest): return decorator + def _client_supports_async(self, session: ServerSession) -> bool: + """Check if the provided session supports async tools based on protocol version.""" + if session.client_params: + client_version = str(session.client_params.protocolVersion) + # Only "next" version supports async tools for now + return client_version == NEXT_PROTOCOL_VERSION + return False + + def _process_tool_result( + self, results: UnstructuredContent | StructuredContent | CombinationContent, tool: types.Tool | None = None + ) -> types.CallToolResult: + """Process tool results and create CallToolResult with validation.""" + # output normalization + unstructured_content: UnstructuredContent + maybe_structured_content: StructuredContent | None + if isinstance(results, tuple) and len(results) == 2: + # tool returned both structured and unstructured content + unstructured_content, maybe_structured_content = cast(CombinationContent, results) + elif isinstance(results, dict): + # tool returned structured content only + maybe_structured_content = cast(StructuredContent, results) + unstructured_content = [types.TextContent(type="text", text=json.dumps(results, indent=2))] + elif hasattr(results, "__iter__"): + # tool returned unstructured content only + unstructured_content = cast(UnstructuredContent, results) + maybe_structured_content = None + else: + raise ValueError(f"Unexpected return type from tool: {type(results).__name__}") + + # output validation + if tool and tool.outputSchema is not None: + if maybe_structured_content is None: + raise ValueError("Output validation error: outputSchema defined but no structured output returned") + else: + try: + jsonschema.validate(instance=maybe_structured_content, schema=tool.outputSchema) + except jsonschema.ValidationError as e: + raise ValueError(f"Output validation error: {e.message}") + + # result + return types.CallToolResult( + content=list(unstructured_content), + structuredContent=maybe_structured_content, + isError=False, + _operation=Operation(token=self.request_context.operation_token) + if self.request_context and self.request_context.operation_token + else None, + ) + + def _should_execute_async(self, tool: types.Tool) -> bool: + """Check if a tool should be executed asynchronously.""" + # Check if client supports async tools (protocol version 
"next") + try: + if self.request_context and self.request_context.session.client_params: + client_version = str(self.request_context.session.client_params.protocolVersion) + if client_version != "next": + return False + else: + return False + except (AttributeError, ValueError): + return False + + # Check if tool is async-only + invocation_mode = getattr(tool, "invocationMode", None) + return invocation_mode == "async" + + def _get_tool_keep_alive(self, tool: types.Tool) -> int: + """Get the keepalive value for an async tool.""" + if tool.internal.keepalive is None: + raise ValueError(f"keepalive not defined for tool {tool.name}") + return tool.internal.keepalive + + def _has_immediate_result(self, tool: types.Tool) -> bool: + """Check if tool has immediate_result function.""" + return tool.internal.immediate_result is not None and callable(tool.internal.immediate_result) + + async def _execute_immediate_result(self, tool: types.Tool, arguments: dict[str, Any]) -> list[types.ContentBlock]: + """Execute immediate result function and return content blocks.""" + immediate_fn = tool.internal.immediate_result + + if immediate_fn is None: + raise ValueError(f"No immediate_result function found for tool {tool.name}") + + # Validate function signature and execute + try: + result = await immediate_fn(**arguments) + if not isinstance(result, list): + raise ValueError("immediate_result must return list[ContentBlock]") + return cast(list[types.ContentBlock], result) + except McpError: + # Re-raise McpError as-is + raise + except Exception as e: + raise McpError( + types.ErrorData(code=types.INTERNAL_ERROR, message=f"Immediate result execution error: {str(e)}") + ) + + async def _execute_tool_async( + self, tool_name: str, arguments: dict[str, Any], request_context: Any + ) -> types.CallToolResult: + """Execute a tool asynchronously and return the result.""" + context_token = None + + try: + # Restore the request context for this task + if request_context: + context_token = request_ctx.set(request_context) + + logger.info(f"Starting async execution of tool '{tool_name}'") + + if not self._tool_function: + raise ValueError("No tool function registered") + + # Execute the tool function + results = await self._tool_function(tool_name, arguments) + + # Get tool definition for validation + tool = await self._get_cached_tool_definition(tool_name) + + # Process results using shared logic + result = self._process_tool_result(results, tool) + logger.info(f"Async execution of tool '{tool_name}' completed") + return result + + finally: + if context_token: + request_ctx.reset(context_token) + def progress_notification(self): def decorator( func: Callable[[str | int, float, float | None, str | None], Awaitable[None]], ): logger.debug("Registering handler for ProgressNotification") - async def handler(req: types.ProgressNotification): + async def handler(req: types.ProgressNotification, _: Any = None): await func( req.params.progressToken, req.params.progress, @@ -580,7 +754,7 @@ def decorator( ): logger.debug("Registering handler for CompleteRequest") - async def handler(req: types.CompleteRequest): + async def handler(req: types.CompleteRequest, _: Any = None): completion = await func(req.params.ref, req.params.argument, req.params.context) return types.ServerResult( types.CompleteResult( @@ -595,6 +769,110 @@ async def handler(req: types.CompleteRequest): return decorator + async def _validate_operation_token(self, token: str) -> ServerAsyncOperation: + """Validate operation token and return operation if 
valid.""" + operation = await self.async_operations.get_operation(token) + if not operation: + raise McpError(types.ErrorData(code=-32602, message="Invalid token")) + + if operation.is_expired: + raise McpError(types.ErrorData(code=-32602, message="Token expired")) + + # Check if operation was cancelled - ignore subsequent requests + if operation.status == "canceled": + raise McpError(types.ErrorData(code=-32602, message="Operation was cancelled")) + + return operation + + def get_operation_status(self): + """Register a handler for checking async tool execution status.""" + + def decorator(func: Callable[[str], Awaitable[types.GetOperationStatusResult]]): + logger.debug("Registering handler for GetOperationStatusRequest") + + async def handler(req: types.GetOperationStatusRequest, _: Any = None): + # Validate token and get operation + operation = await self._validate_operation_token(req.params.token) + + return types.ServerResult( + types.GetOperationStatusResult( + status=operation.status, + error=operation.error, + ) + ) + + self.request_handlers[types.GetOperationStatusRequest] = handler + return func + + return decorator + + def get_operation_result(self): + """Register a handler for retrieving async tool execution results.""" + + def decorator(func: Callable[[str], Awaitable[types.GetOperationPayloadResult]]): + logger.debug("Registering handler for GetOperationPayloadRequest") + + async def handler(req: types.GetOperationPayloadRequest, _: Any = None): + # Validate token and get operation + operation = await self._validate_operation_token(req.params.token) + + if operation.status != "completed": + raise McpError( + types.ErrorData(code=-32600, message=f"Operation not completed (status: {operation.status})") + ) + + if not operation.result: + raise McpError(types.ErrorData(code=-32600, message="No result available for completed operation")) + + return types.ServerResult(types.GetOperationPayloadResult(result=operation.result)) + + self.request_handlers[types.GetOperationPayloadRequest] = handler + return func + + return decorator + + async def handle_cancelled_notification(self, request_id: RequestId) -> None: + """Handle cancellation notification for a request.""" + # Check if this request ID corresponds to an async operation + if request_id in self._request_to_operation: + token = self._request_to_operation[request_id] + # Cancel the operation + if await self.async_operations.cancel_operation(token): + logger.debug(f"Cancelled async operation {token} for request {request_id}") + # Clean up the mapping + del self._request_to_operation[request_id] + + async def _handle_cancelled_notification(self, notification: types.CancelledNotification) -> None: + """Handle cancelled notification from client.""" + request_id = notification.params.requestId + logger.debug(f"Received cancellation notification for request {request_id}") + await self.handle_cancelled_notification(request_id) + + async def send_request_for_operation(self, token: str, request: types.ServerRequest) -> None: + """Send a request associated with an async operation.""" + # Mark operation as requiring input + if await self.async_operations.mark_input_required(token): + # Add operation token to request + if hasattr(request.root, "params") and request.root.params is not None: + if not hasattr(request.root.params, "operation") or request.root.params.operation is None: + request.root.params.operation = Operation(token=token) + logger.debug(f"Marked operation {token} as input_required and added to request") + + async def 
send_notification_for_operation(self, token: str, notification: types.ServerNotification) -> None: + """Send a notification associated with an async operation.""" + # Mark operation as requiring input + if await self.async_operations.mark_input_required(token): + # Add operation token to notification + if hasattr(notification.root, "params") and notification.root.params is not None: + if not hasattr(notification.root.params, "operation") or notification.root.params.operation is None: + notification.root.params.operation = Operation(token=token) + logger.debug(f"Marked operation {token} as input_required and added to notification") + + async def complete_request_for_operation(self, token: str) -> None: + """Mark that a request for an operation has been completed.""" + if await self.async_operations.mark_input_completed(token): + logger.debug(f"Marked operation {token} as no longer requiring input") + async def run( self, read_stream: MemoryObjectReceiveStream[SessionMessage | Exception], @@ -632,6 +910,7 @@ async def run( session, lifespan_context, raise_exceptions, + tg, ) async def _handle_message( @@ -640,13 +919,16 @@ async def _handle_message( session: ServerSession, lifespan_context: LifespanResultT, raise_exceptions: bool = False, + server_scope: TaskGroup | None = None, ): with warnings.catch_warnings(record=True) as w: # TODO(Marcelo): We should be checking if message is Exception here. match message: # type: ignore[reportMatchNotExhaustive] case RequestResponder(request=types.ClientRequest(root=req)) as responder: with responder: - await self._handle_request(message, req, session, lifespan_context, raise_exceptions) + await self._handle_request( + message, req, session, lifespan_context, raise_exceptions, server_scope + ) case types.ClientNotification(root=notify): await self._handle_notification(notify) @@ -660,12 +942,13 @@ async def _handle_request( session: ServerSession, lifespan_context: LifespanResultT, raise_exceptions: bool, + server_scope: TaskGroup | None = None, ): logger.info("Processing request of type %s", type(req).__name__) if handler := self.request_handlers.get(type(req)): # type: ignore logger.debug("Dispatching request of type %s", type(req).__name__) - token = None + context_token = None try: # Extract request context from message metadata request_data = None @@ -674,16 +957,28 @@ async def _handle_request( # Set our global state that can be retrieved via # app.get_request_context() - token = request_ctx.set( + context_token = request_ctx.set( RequestContext( - message.request_id, - message.request_meta, - session, - lifespan_context, + request_id=message.request_id, + operation_token=message.operation.token if message.operation else None, + meta=message.request_meta, + session=session, + supports_async=self._client_supports_async(session), + lifespan_context=lifespan_context, request=request_data, ) ) - response = await handler(req) + response = await handler(req, server_scope) + + # Track async operations for cancellation + if isinstance(req, types.CallToolRequest): + result = response.root + if isinstance(result, types.CallToolResult) and result.operation is not None: + # This is an async operation, track the request ID to token mapping + operation_token = result.operation.token + self._request_to_operation[message.request_id] = operation_token + logger.debug(f"Tracking async operation {operation_token} for request {message.request_id}") + except McpError as err: response = err.error except anyio.get_cancelled_exc_class(): @@ -698,8 +993,8 @@ async def 
_handle_request( response = types.ErrorData(code=0, message=str(err), data=None) finally: # Reset the global state after we are done - if token is not None: - request_ctx.reset(token) + if context_token is not None: + request_ctx.reset(context_token) await message.respond(response) else: @@ -722,5 +1017,5 @@ async def _handle_notification(self, notify: Any): logger.exception("Uncaught exception in notification handler") -async def _ping_handler(request: types.PingRequest) -> types.ServerResult: +async def _ping_handler(request: types.PingRequest, _: Any = None) -> types.ServerResult: return types.ServerResult(types.EmptyResult()) diff --git a/src/mcp/server/session.py b/src/mcp/server/session.py index d00277f11..4cb85f3ec 100644 --- a/src/mcp/server/session.py +++ b/src/mcp/server/session.py @@ -186,8 +186,13 @@ async def send_log_message( data: Any, logger: str | None = None, related_request_id: types.RequestId | None = None, + related_operation_token: str | None = None, ) -> None: """Send a log message notification.""" + operation = None + if related_operation_token: + operation = types.Operation(token=related_operation_token) + await self.send_notification( types.ServerNotification( types.LoggingMessageNotification( @@ -195,6 +200,7 @@ async def send_log_message( level=level, data=data, logger=logger, + _operation=operation, ), ) ), @@ -223,8 +229,13 @@ async def create_message( metadata: dict[str, Any] | None = None, model_preferences: types.ModelPreferences | None = None, related_request_id: types.RequestId | None = None, + related_operation_token: str | None = None, ) -> types.CreateMessageResult: """Send a sampling/create_message request.""" + operation = None + if related_operation_token: + operation = types.Operation(token=related_operation_token) + return await self.send_request( request=types.ServerRequest( types.CreateMessageRequest( @@ -237,6 +248,7 @@ async def create_message( stopSequences=stop_sequences, metadata=metadata, modelPreferences=model_preferences, + _operation=operation, ), ) ), @@ -258,22 +270,30 @@ async def elicit( message: str, requestedSchema: types.ElicitRequestedSchema, related_request_id: types.RequestId | None = None, + related_operation_token: str | None = None, ) -> types.ElicitResult: """Send an elicitation/create request. 
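+
+        When related_operation_token is set, the request params carry an
+        Operation object so the client can correlate this elicitation with a
+        running async tool call. Rough usage sketch (schema and token are
+        placeholders for the caller's values):
+
+            result = await session.elicit(
+                message="Please confirm",
+                requestedSchema=schema,
+                related_operation_token=token,
+            )
+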
Args: message: The message to present to the user requestedSchema: Schema defining the expected response structure + related_request_id: Optional request ID this elicitation is related to + related_operation_token: Optional operation token this elicitation is related to Returns: The client's response """ + operation = None + if related_operation_token: + operation = types.Operation(token=related_operation_token) + return await self.send_request( types.ServerRequest( types.ElicitRequest( params=types.ElicitRequestParams( message=message, requestedSchema=requestedSchema, + _operation=operation, ), ) ), @@ -295,8 +315,13 @@ async def send_progress_notification( total: float | None = None, message: str | None = None, related_request_id: str | None = None, + related_operation_token: str | None = None, ) -> None: """Send a progress notification.""" + operation = None + if related_operation_token: + operation = types.Operation(token=related_operation_token) + await self.send_notification( types.ServerNotification( types.ProgressNotification( @@ -305,6 +330,7 @@ async def send_progress_notification( progress=progress, total=total, message=message, + _operation=operation, ), ) ), diff --git a/src/mcp/server/streamable_http.py b/src/mcp/server/streamable_http.py index b45d742b0..da448794e 100644 --- a/src/mcp/server/streamable_http.py +++ b/src/mcp/server/streamable_http.py @@ -172,6 +172,8 @@ def __init__( ], ] = {} self._terminated = False + # Track operation tokens to original request IDs for stream resumption + self._operation_to_request_id: dict[str, str] = {} @property def is_terminated(self) -> bool: @@ -306,6 +308,94 @@ def _check_content_type(self, request: Request) -> bool: return any(part == CONTENT_TYPE_JSON for part in content_type_parts) + def _is_async_operation_response(self, response_message: JSONRPCMessage) -> bool: + """Check if response is for an async operation that should keep stream open.""" + try: + if not isinstance(response_message.root, JSONRPCResponse): + return False + + result = response_message.root.result + if not result: + return False + + # Check if result has _operation with token + if hasattr(result, "__getitem__") and "_operation" in result: + operation = result["_operation"] # type: ignore + if hasattr(operation, "__getitem__") and "token" in operation: + return bool(operation["token"]) # type: ignore + + return False + except (TypeError, KeyError, AttributeError) as exc: + logger.exception("Exception in _is_async_operation_response: %s", exc) + return False + + async def _handle_sse_mode( + self, + message: JSONRPCMessage, + request: Request, + writer: MemoryObjectSendStream[SessionMessage | Exception], + request_id: str, + request_stream_reader: MemoryObjectReceiveStream[EventMessage], + scope: Scope, + receive: Receive, + send: Send, + ) -> None: + """Handle SSE response mode.""" + # Create SSE stream + sse_stream_writer, sse_stream_reader = anyio.create_memory_object_stream[dict[str, str]](0) + + async def sse_writer(): + # Get the request ID from the incoming request message + try: + async with sse_stream_writer, request_stream_reader: + # Process messages from the request-specific stream + async for event_message in request_stream_reader: + # Build the event data + event_data = self._create_event_data(event_message) + await sse_stream_writer.send(event_data) + + # If response, remove from pending streams and close + if isinstance( + event_message.message.root, + JSONRPCResponse | JSONRPCError, + ): + break + except Exception: + logger.exception("Error in 
SSE writer") + finally: + logger.debug("Closing SSE writer") + await self._clean_up_memory_streams(request_id) + + # Create and start EventSourceResponse + # SSE stream mode (original behavior) + # Set up headers + headers = { + "Cache-Control": "no-cache, no-transform", + "Connection": "keep-alive", + "Content-Type": CONTENT_TYPE_SSE, + **({MCP_SESSION_ID_HEADER: self.mcp_session_id} if self.mcp_session_id else {}), + } + response = EventSourceResponse( + content=sse_stream_reader, + data_sender_callable=sse_writer, + headers=headers, + ) + + # Start the SSE response (this will send headers immediately) + try: + # First send the response to establish the SSE connection + async with anyio.create_task_group() as tg: + tg.start_soon(response, scope, receive, send) + # Then send the message to be processed by the server + metadata = ServerMessageMetadata(request_context=request) + session_message = SessionMessage(message, metadata=metadata) + await writer.send(session_message) + except Exception: + logger.exception("SSE response error") + await sse_stream_writer.aclose() + await sse_stream_reader.aclose() + await self._clean_up_memory_streams(request_id) + async def _handle_post_request(self, scope: Scope, request: Request, receive: Receive, send: Send) -> None: """Handle POST requests containing JSON-RPC messages.""" writer = self._read_stream_writer @@ -399,6 +489,7 @@ async def _handle_post_request(self, scope: Scope, request: Request, receive: Re metadata = ServerMessageMetadata(request_context=request) session_message = SessionMessage(message, metadata=metadata) await writer.send(session_message) + should_pop_stream = True # Default to cleaning up stream try: # Process messages from the request-specific stream # We need to collect all messages until we get a response @@ -416,6 +507,11 @@ async def _handle_post_request(self, scope: Scope, request: Request, receive: Re # At this point we should have a response if response_message: + # Check if this is an async operation response - keep stream open + if self._is_async_operation_response(response_message): + # This is an async operation - keep the stream open for elicitation/sampling + should_pop_stream = False + # Create JSON response response = self._create_json_response(response_message) await response(scope, receive, send) @@ -436,63 +532,13 @@ async def _handle_post_request(self, scope: Scope, request: Request, receive: Re ) await response(scope, receive, send) finally: - await self._clean_up_memory_streams(request_id) - else: - # Create SSE stream - sse_stream_writer, sse_stream_reader = anyio.create_memory_object_stream[dict[str, str]](0) - - async def sse_writer(): - # Get the request ID from the incoming request message - try: - async with sse_stream_writer, request_stream_reader: - # Process messages from the request-specific stream - async for event_message in request_stream_reader: - # Build the event data - event_data = self._create_event_data(event_message) - await sse_stream_writer.send(event_data) - - # If response, remove from pending streams and close - if isinstance( - event_message.message.root, - JSONRPCResponse | JSONRPCError, - ): - break - except Exception: - logger.exception("Error in SSE writer") - finally: - logger.debug("Closing SSE writer") + if should_pop_stream: await self._clean_up_memory_streams(request_id) - - # Create and start EventSourceResponse - # SSE stream mode (original behavior) - # Set up headers - headers = { - "Cache-Control": "no-cache, no-transform", - "Connection": "keep-alive", - 
"Content-Type": CONTENT_TYPE_SSE, - **({MCP_SESSION_ID_HEADER: self.mcp_session_id} if self.mcp_session_id else {}), - } - response = EventSourceResponse( - content=sse_stream_reader, - data_sender_callable=sse_writer, - headers=headers, + else: + await self._handle_sse_mode( + message, request, writer, request_id, request_stream_reader, scope, receive, send ) - # Start the SSE response (this will send headers immediately) - try: - # First send the response to establish the SSE connection - async with anyio.create_task_group() as tg: - tg.start_soon(response, scope, receive, send) - # Then send the message to be processed by the server - metadata = ServerMessageMetadata(request_context=request) - session_message = SessionMessage(message, metadata=metadata) - await writer.send(session_message) - except Exception: - logger.exception("SSE response error") - await sse_stream_writer.aclose() - await sse_stream_reader.aclose() - await self._clean_up_memory_streams(request_id) - except Exception as err: logger.exception("Error handling POST request") response = self._create_error_response( @@ -838,6 +884,38 @@ async def message_router(): # If this response is for an existing request stream, # send it there target_request_id = response_id + + # Track operation tokens for stream resumption + if ( + isinstance(message.root, JSONRPCResponse) + and message.root.result + and "_operation" in message.root.result + and ( + ("token" in message.root.result["_operation"]) + and message.root.result["_operation"]["token"] + ) + ): + operation_token = message.root.result["_operation"]["token"] + self._operation_to_request_id[operation_token] = response_id + logger.info(f"Tracking operation token {operation_token} -> request {response_id}") + elif ( + message.root.params + and "_operation" in message.root.params + and ( + ("token" in message.root.params["_operation"]) + and message.root.params["_operation"]["token"] + ) + ): + # Route operation-related messages back to the original request stream + operation_token = message.root.params["_operation"]["token"] + if operation_token in self._operation_to_request_id: + target_request_id = self._operation_to_request_id[operation_token] + logging.info(operation_token) + else: + logger.warning( + f"Operation token {operation_token} not found in mapping, using GET_STREAM_KEY" + ) + target_request_id = GET_STREAM_KEY # Extract related_request_id from meta if it exists elif ( session_message.metadata is not None diff --git a/src/mcp/shared/async_operations.py b/src/mcp/shared/async_operations.py new file mode 100644 index 000000000..d7029ac5e --- /dev/null +++ b/src/mcp/shared/async_operations.py @@ -0,0 +1,512 @@ +"""Async operations management for FastMCP servers.""" + +from __future__ import annotations + +import contextlib +import logging +import secrets +import time +from collections import deque +from collections.abc import AsyncIterator, Awaitable, Callable +from dataclasses import dataclass +from typing import TYPE_CHECKING, Any, Generic, Protocol, TypeVar + +import anyio +from anyio.abc import TaskGroup + +import mcp.types as types +from mcp.types import AsyncOperationStatus + +if TYPE_CHECKING: + # Avoid circular import with mcp.server.lowlevel.Server + from mcp.server.session import ServerSession + from mcp.shared.context import RequestContext + +logger = logging.getLogger(__name__) + + +@dataclass +class PendingAsyncTask: + """Represents a task waiting to be dispatched.""" + + token: str + tool_name: str + arguments: dict[str, Any] + request_context: Any # The 
diff --git a/src/mcp/shared/async_operations.py b/src/mcp/shared/async_operations.py
new file mode 100644
index 000000000..d7029ac5e
--- /dev/null
+++ b/src/mcp/shared/async_operations.py
@@ -0,0 +1,512 @@
+"""Async operations management for FastMCP servers."""
+
+from __future__ import annotations
+
+import contextlib
+import logging
+import secrets
+import time
+from collections import deque
+from collections.abc import AsyncIterator, Awaitable, Callable
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Any, Generic, Protocol, TypeVar
+
+import anyio
+from anyio.abc import TaskGroup
+
+import mcp.types as types
+from mcp.types import AsyncOperationStatus
+
+if TYPE_CHECKING:
+    # Avoid circular import with mcp.server.lowlevel.Server
+    from mcp.server.session import ServerSession
+    from mcp.shared.context import RequestContext
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class PendingAsyncTask:
+    """Represents a task waiting to be dispatched."""
+
+    token: str
+    tool_name: str
+    arguments: dict[str, Any]
+    request_context: Any  # The RequestContext object to restore
+
+
+@dataclass
+class ClientAsyncOperation:
+    """Minimal operation tracking for client-side use."""
+
+    token: str
+    tool_name: str
+    created_at: float
+    keep_alive: int
+
+    @property
+    def is_expired(self) -> bool:
+        """Check if operation has expired based on keepAlive."""
+        return time.time() > (self.created_at + self.keep_alive * 2)  # Give some buffer before expiration
+
+
+@dataclass
+class ServerAsyncOperation:
+    """Represents an async tool operation."""
+
+    token: str
+    tool_name: str
+    arguments: dict[str, Any]
+    status: AsyncOperationStatus
+    created_at: float
+    keep_alive: int
+    resolved_at: float | None = None
+    session_id: str | None = None
+    result: types.CallToolResult | None = None
+    error: str | None = None
+
+    @property
+    def is_expired(self) -> bool:
+        """Check if operation has expired based on keepAlive."""
+        if not self.resolved_at:
+            return False
+        if self.status in ("completed", "failed", "canceled"):
+            return time.time() > (self.resolved_at + self.keep_alive)
+        return False
+
+    @property
+    def is_terminal(self) -> bool:
+        """Check if operation is in a terminal state."""
+        return self.status in ("completed", "failed", "canceled", "unknown")
+
+
+OperationT = TypeVar("OperationT", ClientAsyncOperation, ServerAsyncOperation)
+
+
+class BaseOperationManager(Generic[OperationT]):
+    """Base class for operation management."""
+
+    def __init__(self, *, token_generator: Callable[[str | None], str] | None = None):
+        self._operations: dict[str, OperationT] = {}
+        self._cleanup_interval = 60  # Cleanup every 60 seconds
+        self._token_generator = token_generator or self._default_token_generator
+        self._running = False
+
+    def _default_token_generator(self, session_id: str | None = None) -> str:
+        """Default token generation using random tokens."""
+        return secrets.token_urlsafe(32)
+
+    def generate_token(self, session_id: str | None = None) -> str:
+        """Generate a token."""
+        return self._token_generator(session_id)
+
+    def _get_operation(self, token: str) -> OperationT | None:
+        """Internal method to get operation by token."""
+        return self._operations.get(token)
+
+    def _set_operation(self, token: str, operation: OperationT) -> None:
+        """Internal method to store an operation."""
+        self._operations[token] = operation
+
+    def _remove_operation(self, token: str) -> OperationT | None:
+        """Internal method to remove and return an operation."""
+        return self._operations.pop(token, None)
+
+    def get_operation(self, token: str) -> OperationT | None:
+        """Get operation by token."""
+        return self._get_operation(token)
+
+    def remove_operation(self, token: str) -> bool:
+        """Remove an operation by token."""
+        return self._remove_operation(token) is not None
+
+    def cleanup_expired(self) -> int:
+        """Remove expired operations and return count of removed operations."""
+        expired_tokens = [token for token, operation in self._operations.items() if operation.is_expired]
+        for token in expired_tokens:
+            self._remove_operation(token)
+        return len(expired_tokens)
+
+    async def stop_cleanup_loop(self) -> None:
+        self._running = False
+
+    async def cleanup_loop(self) -> None:
+        """Background task to clean up expired operations."""
+        if self._running:
+            return
+        self._running = True
+
+        while self._running:
+            await anyio.sleep(self._cleanup_interval)
+            count = self.cleanup_expired()
+            if count > 0:
+                logger.debug(f"Cleaned up {count} expired operations")
+
+
+class AsyncOperationStore(Protocol):
+    """Protocol for async operation storage implementations."""
+
+    async 
def get_operation(self, token: str) -> ServerAsyncOperation | None: + """Get operation by token.""" + ... + + async def store_operation(self, operation: ServerAsyncOperation) -> None: + """Store an operation.""" + ... + + async def update_status(self, token: str, status: AsyncOperationStatus) -> bool: + """Update operation status.""" + ... + + async def complete_operation_with_result(self, token: str, result: types.CallToolResult) -> bool: + """Complete operation with result.""" + ... + + async def fail_operation_with_error(self, token: str, error: str) -> bool: + """Fail operation with error.""" + ... + + async def cleanup_expired(self) -> int: + """Remove expired operations and return count.""" + ... + + +class AsyncOperationBroker(Protocol): + """Protocol for async operation queueing and scheduling.""" + + async def enqueue_task( + self, + token: str, + tool_name: str, + arguments: dict[str, Any], + request_context: RequestContext[ServerSession, Any, Any], + ) -> None: + """Enqueue a task for execution.""" + ... + + async def get_pending_tasks(self) -> list[PendingAsyncTask]: + """Get all pending tasks.""" + ... + + async def acknowledge_task(self, token: str) -> None: + """Acknowledge that a task has been dispatched.""" + ... + + async def complete_task(self, token: str) -> None: + """Remove a completed task from persistent storage.""" + ... + + +class ClientAsyncOperationManager(BaseOperationManager[ClientAsyncOperation]): + """Manages client-side operation tracking.""" + + def track_operation(self, token: str, tool_name: str, keep_alive: int = 3600) -> None: + """Track a client operation.""" + operation = ClientAsyncOperation( + token=token, + tool_name=tool_name, + created_at=time.time(), + keep_alive=keep_alive, + ) + self._set_operation(token, operation) + + def get_tool_name(self, token: str) -> str | None: + """Get tool name for a tracked operation.""" + operation = self._get_operation(token) + return operation.tool_name if operation else None + + +class ServerAsyncOperationManager: + """Manages async tool operations using Store and Broker components.""" + + def __init__( + self, + store: AsyncOperationStore | None = None, + broker: AsyncOperationBroker | None = None, + *, + token_generator: Callable[[str | None], str] | None = None, + ): + # Use provided implementations or default to InMemory + self.store = store or InMemoryAsyncOperationStore() + self.broker = broker or InMemoryAsyncOperationBroker() + self._token_generator = token_generator or self._default_token_generator + self._tool_executor: Callable[[str, dict[str, Any], Any], Awaitable[types.CallToolResult]] | None = None + self._task_group: TaskGroup | None = None + self._run_lock = anyio.Lock() + self._running = False + + def set_handler(self, tool_executor: Callable[[str, dict[str, Any], Any], Awaitable[types.CallToolResult]]) -> None: + """Set the tool executor handler for late binding.""" + self._tool_executor = tool_executor + + def _default_token_generator(self, session_id: str | None = None) -> str: + """Default token generation using random tokens.""" + return secrets.token_urlsafe(32) + + def generate_token(self, session_id: str | None = None) -> str: + """Generate a token.""" + return self._token_generator(session_id) + + @contextlib.asynccontextmanager + async def run(self) -> AsyncIterator[None]: + """Run the async operations manager with its own task group.""" + # Thread-safe check to ensure run() is only called once + async with self._run_lock: + if self._running: + raise 
RuntimeError("ServerAsyncOperationManager.run() is already running.") + self._running = True + + async with anyio.create_task_group() as tg: + self._task_group = tg + logger.info("ServerAsyncOperationManager started") + # Start cleanup loop and task dispatcher + tg.start_soon(self._cleanup_loop) + tg.start_soon(self._task_dispatcher) + try: + yield + finally: + logger.info("ServerAsyncOperationManager shutting down") + # Stop cleanup loop gracefully + await self._stop_cleanup_loop() + # Cancel task group to stop all spawned tasks + tg.cancel_scope.cancel() + self._task_group = None + self._running = False + + async def _cleanup_loop(self) -> None: + """Background cleanup loop for expired operations.""" + while self._running: + await anyio.sleep(60) # Cleanup every 60 seconds + count = await self.store.cleanup_expired() + if count > 0: + logger.debug(f"Cleaned up {count} expired operations") + + async def _stop_cleanup_loop(self) -> None: + """Stop the cleanup loop.""" + self._running = False + + async def _task_dispatcher(self) -> None: + """Background task dispatcher that processes queued tasks.""" + while self._running: + await anyio.sleep(0.1) # Check for tasks frequently + pending_tasks = await self.broker.get_pending_tasks() + for task in pending_tasks: + if self._task_group and self._tool_executor: + logger.debug(f"Dispatching queued async task {task.token}") + self._task_group.start_soon(self._execute_tool_task, task, name=f"lro_{task.token}") + # Acknowledge that we've dispatched this task + await self.broker.acknowledge_task(task.token) + + async def _execute_tool_task(self, task: PendingAsyncTask) -> None: + """Execute a tool task.""" + try: + if not self._tool_executor: + raise ValueError("No tool executor configured") + + await self.mark_working(task.token) + result = await self._tool_executor(task.tool_name, task.arguments, task.request_context) + await self.complete_operation(task.token, result) + + except Exception as e: + logger.exception(f"Tool task {task.token} failed: {e}") + await self.fail_operation(task.token, str(e)) + + async def start_task( + self, + token: str, + tool_name: str, + arguments: dict[str, Any], + request_context: RequestContext[ServerSession, Any, Any], + ) -> None: + """Enqueue an async task for execution.""" + await self.broker.enqueue_task(token, tool_name, arguments, request_context) + + async def create_operation( + self, + tool_name: str, + arguments: dict[str, Any], + keep_alive: int = 3600, + session_id: str | None = None, + ) -> ServerAsyncOperation: + """Create a new async operation.""" + token = self.generate_token(session_id) + operation = ServerAsyncOperation( + token=token, + tool_name=tool_name, + arguments=arguments, + status="submitted", + created_at=time.time(), + keep_alive=keep_alive, + session_id=session_id, + ) + await self.store.store_operation(operation) + logger.info(f"Created async operation {token} for tool '{tool_name}'") + return operation + + async def get_operation(self, token: str) -> ServerAsyncOperation | None: + """Get operation by token.""" + return await self.store.get_operation(token) + + async def mark_working(self, token: str) -> bool: + """Mark operation as working.""" + return await self.store.update_status(token, "working") + + async def complete_operation(self, token: str, result: types.CallToolResult) -> bool: + """Complete operation with result.""" + success = await self.store.complete_operation_with_result(token, result) + if success: + await self.broker.complete_task(token) + logger.info(f"Async 
operation {token} completed successfully") + return success + + async def fail_operation(self, token: str, error: str) -> bool: + """Fail operation with error.""" + success = await self.store.fail_operation_with_error(token, error) + if success: + await self.broker.complete_task(token) + logger.info(f"Async operation {token} failed: {error}") + return success + + async def cancel_operation(self, token: str) -> bool: + """Cancel operation.""" + operation = await self.store.get_operation(token) + if not operation or operation.status in ("completed", "failed", "canceled"): + return False + + # Create new operation with updated fields instead of mutating + cancelled_operation = ServerAsyncOperation( + token=operation.token, + tool_name=operation.tool_name, + arguments=operation.arguments, + status="canceled", + created_at=operation.created_at, + keep_alive=operation.keep_alive, + resolved_at=time.time(), + session_id=operation.session_id, + result=operation.result, + error=operation.error, + ) + await self.store.store_operation(cancelled_operation) + await self.broker.complete_task(token) # Clean up from broker + logger.info(f"Async operation {token} was cancelled") + return True + + async def mark_input_required(self, token: str) -> bool: + """Mark operation as requiring input.""" + operation = await self.store.get_operation(token) + if not operation or operation.status not in ("submitted", "working"): + return False + + await self.store.update_status(token, "input_required") + return True + + async def mark_input_completed(self, token: str) -> bool: + """Mark input as completed, transitioning back to working.""" + operation = await self.store.get_operation(token) + if not operation or operation.status != "input_required": + return False + + await self.store.update_status(token, "working") + return True + + async def get_operation_result(self, token: str) -> types.CallToolResult | None: + """Get result for completed operation.""" + operation = await self.store.get_operation(token) + if not operation or operation.status != "completed": + return None + return operation.result + + async def cleanup_expired(self) -> int: + """Remove expired operations and return count.""" + return await self.store.cleanup_expired() + + +class InMemoryAsyncOperationStore(AsyncOperationStore): + """In-memory implementation of AsyncOperationStore.""" + + def __init__(self): + self._operations: dict[str, ServerAsyncOperation] = {} + + async def get_operation(self, token: str) -> ServerAsyncOperation | None: + """Get operation by token.""" + return self._operations.get(token) + + async def store_operation(self, operation: ServerAsyncOperation) -> None: + """Store an operation.""" + self._operations[operation.token] = operation + + async def update_status(self, token: str, status: AsyncOperationStatus) -> bool: + """Update operation status.""" + operation = self._operations.get(token) + if not operation: + return False + + # Don't allow transitions from terminal states + if operation.is_terminal: + return False + + operation.status = status + if status in ("completed", "failed", "canceled"): + operation.resolved_at = time.time() + return True + + async def complete_operation_with_result(self, token: str, result: types.CallToolResult) -> bool: + """Complete operation with result.""" + operation = self._operations.get(token) + if not operation or operation.is_terminal: + return False + + operation.status = "completed" + operation.result = result + operation.resolved_at = time.time() + return True + + async def 
fail_operation_with_error(self, token: str, error: str) -> bool: + """Fail operation with error.""" + operation = self._operations.get(token) + if not operation or operation.is_terminal: + return False + + operation.status = "failed" + operation.error = error + operation.resolved_at = time.time() + return True + + async def cleanup_expired(self) -> int: + """Remove expired operations and return count.""" + expired_tokens = [token for token, op in self._operations.items() if op.is_expired] + for token in expired_tokens: + del self._operations[token] + return len(expired_tokens) + + +class InMemoryAsyncOperationBroker(AsyncOperationBroker): + """In-memory implementation of AsyncOperationBroker.""" + + def __init__(self): + self._task_queue: deque[PendingAsyncTask] = deque() + + async def enqueue_task( + self, + token: str, + tool_name: str, + arguments: dict[str, Any], + request_context: RequestContext[ServerSession, Any, Any], + ) -> None: + """Enqueue a task for execution.""" + task = PendingAsyncTask(token=token, tool_name=tool_name, arguments=arguments, request_context=request_context) + self._task_queue.append(task) + + async def get_pending_tasks(self) -> list[PendingAsyncTask]: + """Get all pending tasks without clearing them.""" + return list(self._task_queue) + + async def acknowledge_task(self, token: str) -> None: + """Acknowledge that a task has been dispatched.""" + # Remove the task from the queue + self._task_queue = deque(task for task in self._task_queue if task.token != token) + + async def complete_task(self, token: str) -> None: + """Remove a completed task from persistent storage.""" + # For in-memory broker, this is the same as acknowledge + self._task_queue = deque(task for task in self._task_queue if task.token != token) diff --git a/src/mcp/shared/context.py b/src/mcp/shared/context.py index f3006e7d5..d83c9f7bb 100644 --- a/src/mcp/shared/context.py +++ b/src/mcp/shared/context.py @@ -12,9 +12,26 @@ @dataclass -class RequestContext(Generic[SessionT, LifespanContextT, RequestT]): +class SerializableRequestContext: + """Serializable subset of RequestContext for persistent storage.""" + request_id: RequestId + operation_token: str | None meta: RequestParams.Meta | None + supports_async: bool + + +@dataclass +class RequestContext(SerializableRequestContext, Generic[SessionT, LifespanContextT, RequestT]): session: SessionT lifespan_context: LifespanContextT request: RequestT | None = None + + def to_serializable(self) -> SerializableRequestContext: + """Extract serializable parts of this context.""" + return SerializableRequestContext( + request_id=self.request_id, + operation_token=self.operation_token, + meta=self.meta, + supports_async=self.supports_async, + ) diff --git a/src/mcp/shared/memory.py b/src/mcp/shared/memory.py index 265d07c37..e822d980d 100644 --- a/src/mcp/shared/memory.py +++ b/src/mcp/shared/memory.py @@ -57,6 +57,7 @@ async def create_connected_server_and_client_session( client_info: types.Implementation | None = None, raise_exceptions: bool = False, elicitation_callback: ElicitationFnT | None = None, + protocol_version: str | None = None, ) -> AsyncGenerator[ClientSession, None]: """Creates a ClientSession that is connected to a running MCP server.""" @@ -70,29 +71,31 @@ async def create_connected_server_and_client_session( server_read, server_write = server_streams # Create a cancel scope for the server task - async with anyio.create_task_group() as tg: - tg.start_soon( - lambda: server.run( - server_read, - server_write, - 
server.create_initialization_options(), - raise_exceptions=raise_exceptions, + async with server.async_operations.run(): + async with anyio.create_task_group() as tg: + tg.start_soon( + lambda: server.run( + server_read, + server_write, + server.create_initialization_options(), + raise_exceptions=raise_exceptions, + ) ) - ) - - try: - async with ClientSession( - read_stream=client_read, - write_stream=client_write, - read_timeout_seconds=read_timeout_seconds, - sampling_callback=sampling_callback, - list_roots_callback=list_roots_callback, - logging_callback=logging_callback, - message_handler=message_handler, - client_info=client_info, - elicitation_callback=elicitation_callback, - ) as client_session: - await client_session.initialize() - yield client_session - finally: - tg.cancel_scope.cancel() + + try: + async with ClientSession( + read_stream=client_read, + write_stream=client_write, + read_timeout_seconds=read_timeout_seconds, + sampling_callback=sampling_callback, + list_roots_callback=list_roots_callback, + logging_callback=logging_callback, + message_handler=message_handler, + client_info=client_info, + elicitation_callback=elicitation_callback, + protocol_version=protocol_version, + ) as client_session: + await client_session.initialize() + yield client_session + finally: + tg.cancel_scope.cancel() diff --git a/src/mcp/shared/session.py b/src/mcp/shared/session.py index 4e774984d..6d11469c8 100644 --- a/src/mcp/shared/session.py +++ b/src/mcp/shared/session.py @@ -16,16 +16,20 @@ from mcp.types import ( CONNECTION_CLOSED, INVALID_PARAMS, + CallToolResult, CancelledNotification, ClientNotification, ClientRequest, ClientResult, ErrorData, + GetOperationPayloadRequest, + GetOperationPayloadResult, JSONRPCError, JSONRPCMessage, JSONRPCNotification, JSONRPCRequest, JSONRPCResponse, + Operation, ProgressNotification, RequestParams, ServerNotification, @@ -70,6 +74,7 @@ def __init__( request_id: RequestId, request_meta: RequestParams.Meta | None, request: ReceiveRequestT, + operation: Operation | None, session: """BaseSession[ SendRequestT, SendNotificationT, @@ -83,6 +88,7 @@ def __init__( self.request_id = request_id self.request_meta = request_meta self.request = request + self.operation = operation self.message_metadata = message_metadata self._session = session self._completed = False @@ -177,6 +183,7 @@ class BaseSession( _request_id: int _in_flight: dict[RequestId, RequestResponder[ReceiveRequestT, SendResultT]] _progress_callbacks: dict[RequestId, ProgressFnT] + _operation_requests: dict[str, RequestId] def __init__( self, @@ -196,6 +203,7 @@ def __init__( self._session_read_timeout_seconds = read_timeout_seconds self._in_flight = {} self._progress_callbacks = {} + self._operation_requests = {} self._exit_stack = AsyncExitStack() async def __aenter__(self) -> Self: @@ -251,6 +259,7 @@ async def send_request( # Store the callback for this request self._progress_callbacks[request_id] = progress_callback + pop_progress: RequestId | None = request_id try: jsonrpc_request = JSONRPCRequest( jsonrpc="2.0", @@ -285,11 +294,28 @@ async def send_request( if isinstance(response_or_error, JSONRPCError): raise McpError(response_or_error.error) else: - return result_type.model_validate(response_or_error.result) + result = result_type.model_validate(response_or_error.result) + if isinstance(result, CallToolResult) and result.operation is not None: + # Store mapping of operation token to request ID for async operations + self._operation_requests[result.operation.token] = request_id + + # Don't 
pop the progress function if we were given one
+                        pop_progress = None
+                    elif isinstance(request, GetOperationPayloadRequest) and isinstance(result, GetOperationPayloadResult):
+                        # Checked request and result to ensure no error
+                        operation_token = request.params.token
+
+                        # Pop the progress function for the original request,
+                        # falling back to this request's own ID if the token is unknown
+                        pop_progress = self._operation_requests.get(operation_token, request_id)
+
+                        # Pop the token mapping since we know we won't need it anymore
+                        self._operation_requests.pop(operation_token, None)
+                    return result
         finally:
             self._response_streams.pop(request_id, None)
-            self._progress_callbacks.pop(request_id, None)
+            # Request IDs start at 0, so test for None explicitly rather than truthiness
+            if pop_progress is not None:
+                self._progress_callbacks.pop(pop_progress, None)
             await response_stream.aclose()
             await response_stream_reader.aclose()
@@ -348,6 +374,9 @@ async def _receive_loop(self) -> None:
                         if validated_request.root.params
                         else None,
                         request=validated_request,
+                        operation=validated_request.root.params.operation
+                        if validated_request.root.params
+                        else None,
                         session=self,
                         on_complete=lambda r: self._in_flight.pop(r.request_id, None),
                         message_metadata=message.metadata,
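
A consequence of the `send_request` change above, sketched under the assumption that the async-tools client API from this PR is in use (the tool name and arguments are invented): the progress callback registered for the original `tools/call` is deliberately left in place while the operation runs, and is only popped once `tools/async/result` is fetched for the same token.

```python
# Sketch only: progress callbacks now outlive the initial CallToolResult
# for async operations.
from mcp import ClientSession


async def call_with_progress(session: ClientSession) -> None:
    async def on_progress(progress: float, total: float | None, message: str | None) -> None:
        # Still invoked for operation-scoped progress notifications, because
        # send_request no longer pops the callback when the result carries a token.
        print(f"progress: {progress}/{total} {message or ''}")

    result = await session.call_tool(
        "analyze_data",  # hypothetical async tool
        arguments={"dataset": "sales.csv"},
        progress_callback=on_progress,
    )
    if result.operation is not None:
        # In practice, poll get_operation_status until "completed" first.
        payload = await session.get_operation_result(result.operation.token)
        print(payload.result.content)
```
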
""" + operation_props: Operation | None = Field(alias="_operation", default=None) + """ + Async operation parameters, only used when a result is sent in response to a request with operation parameters. + """ model_config = ConfigDict(extra="allow") @@ -176,6 +191,9 @@ class ErrorData(BaseModel): sender (e.g. detailed error information, nested errors etc.). """ + operation: Operation | None = Field(alias="_operation", default=None) + """Async operation parameters, only used when an error is sent during an asynchronous tool call.""" + model_config = ConfigDict(extra="allow") @@ -868,6 +886,18 @@ class ToolAnnotations(BaseModel): model_config = ConfigDict(extra="allow") +class InternalToolProperties(BaseModel): + """ + Internal properties for tools that are not serialized in the MCP protocol. + """ + + immediate_result: Any = Field(default=None) + """Function to execute for immediate results in async operations.""" + + keepalive: int | None = Field(default=None) + """Keepalive duration in seconds for async operations.""" + + class Tool(BaseMetadata): """Definition for a tool the client can call.""" @@ -880,6 +910,12 @@ class Tool(BaseMetadata): An optional JSON Schema object defining the structure of the tool's output returned in the structuredContent field of a CallToolResult. """ + invocationMode: Literal["sync", "async"] | None = None + """ + Optional invocation mode for the tool. If not specified, defaults to sync-only. + - "sync": Tool supports synchronous execution only + - "async": Tool supports asynchronous execution only + """ icons: list[Icon] | None = None """An optional list of icons for this tool.""" annotations: ToolAnnotations | None = None @@ -889,6 +925,10 @@ class Tool(BaseMetadata): See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) for notes on _meta usage. """ + internal: InternalToolProperties = Field(default_factory=InternalToolProperties, exclude=True) + """ + Internal properties not serialized in MCP protocol. 
@@ -898,11 +938,81 @@ class ListToolsResult(PaginatedResult):
     tools: list[Tool]
 
 
+class AsyncRequestProperties(BaseModel):
+    """Properties for async tool execution requests."""
+
+    keepAlive: int | None = None
+    """Number of seconds the client wants the result to be kept available upon completion."""
+    model_config = ConfigDict(extra="allow")
+
+
+class AsyncResultProperties(BaseModel):
+    """Properties for async tool execution results."""
+
+    token: str
+    """Server-generated token to use for checking status and retrieving results."""
+    keepAlive: int
+    """Number of seconds the result will be kept available upon completion."""
+    model_config = ConfigDict(extra="allow")
+
+
+# Async status checking types
+class GetOperationStatusParams(RequestParams):
+    """Parameters for checking async tool status."""
+
+    token: str
+    """Token from the original async tool call."""
+
+
+class GetOperationStatusRequest(Request[GetOperationStatusParams, Literal["tools/async/status"]]):
+    """Request to check the status of an async tool call."""
+
+    method: Literal["tools/async/status"] = "tools/async/status"
+    params: GetOperationStatusParams
+
+
+AsyncOperationStatus = Literal["submitted", "working", "input_required", "completed", "canceled", "failed", "unknown"]
+"""Status values for async operations."""
+
+
+class GetOperationStatusResult(Result):
+    """Result of checking async tool status."""
+
+    status: AsyncOperationStatus
+    """Current status of the async operation."""
+    error: str | None = None
+    """Error message if status is 'failed'."""
+
+
+# Async payload retrieval types
+class GetOperationPayloadParams(RequestParams):
+    """Parameters for getting async tool payload."""
+
+    token: str
+    """Token from the original async tool call."""
+
+
+class GetOperationPayloadRequest(Request[GetOperationPayloadParams, Literal["tools/async/result"]]):
+    """Request to get the result of a completed async tool call."""
+
+    method: Literal["tools/async/result"] = "tools/async/result"
+    params: GetOperationPayloadParams
+
+
+class GetOperationPayloadResult(Result):
+    """Result containing the final async tool call result."""
+
+    result: "CallToolResult"
+    """The result of the tool call."""
+
+
 class CallToolRequestParams(RequestParams):
     """Parameters for calling a tool."""
 
     name: str
     arguments: dict[str, Any] | None = None
+    operation_params: AsyncRequestProperties | None = Field(serialization_alias="operation", default=None)
+    """Optional async execution parameters."""
 
     model_config = ConfigDict(extra="allow")
@@ -920,6 +1030,8 @@ class CallToolResult(Result):
     structuredContent: dict[str, Any] | None = None
     """An optional JSON object that represents the structured result of the tool call."""
     isError: bool = False
+    operation: AsyncResultProperties | None = None
+    """Optional async execution information. 
Present when tool is executed asynchronously.""" class ToolListChangedNotification(Notification[NotificationParams | None, Literal["notifications/tools/list_changed"]]): @@ -1262,6 +1374,8 @@ class ClientRequest( | UnsubscribeRequest | CallToolRequest | ListToolsRequest + | GetOperationStatusRequest + | GetOperationPayloadRequest ] ): pass @@ -1345,6 +1459,8 @@ class ServerResult( | ReadResourceResult | CallToolResult | ListToolsResult + | GetOperationStatusResult + | GetOperationPayloadResult ] ): pass diff --git a/tests/issues/test_176_progress_token.py b/tests/issues/test_176_progress_token.py index eb5f19d64..59fc30bc6 100644 --- a/tests/issues/test_176_progress_token.py +++ b/tests/issues/test_176_progress_token.py @@ -21,7 +21,9 @@ async def test_progress_token_zero_first_call(): request_context = RequestContext( request_id="test-request", + operation_token=None, session=mock_session, + supports_async=False, meta=mock_meta, lifespan_context=None, ) @@ -36,6 +38,27 @@ async def test_progress_token_zero_first_call(): # Verify progress notifications assert mock_session.send_progress_notification.call_count == 3, "All progress notifications should be sent" - mock_session.send_progress_notification.assert_any_call(progress_token=0, progress=0.0, total=10.0, message=None) - mock_session.send_progress_notification.assert_any_call(progress_token=0, progress=5.0, total=10.0, message=None) - mock_session.send_progress_notification.assert_any_call(progress_token=0, progress=10.0, total=10.0, message=None) + mock_session.send_progress_notification.assert_any_call( + progress_token=0, + progress=0.0, + total=10.0, + message=None, + related_request_id="test-request", + related_operation_token=None, + ) + mock_session.send_progress_notification.assert_any_call( + progress_token=0, + progress=5.0, + total=10.0, + message=None, + related_request_id="test-request", + related_operation_token=None, + ) + mock_session.send_progress_notification.assert_any_call( + progress_token=0, + progress=10.0, + total=10.0, + message=None, + related_request_id="test-request", + related_operation_token=None, + ) diff --git a/tests/server/fastmcp/test_immediate_result.py b/tests/server/fastmcp/test_immediate_result.py new file mode 100644 index 000000000..bdfd4a17e --- /dev/null +++ b/tests/server/fastmcp/test_immediate_result.py @@ -0,0 +1,744 @@ +"""Test immediate_result functionality in FastMCP.""" + +import anyio +import pytest + +from mcp.server.fastmcp import FastMCP +from mcp.server.fastmcp.tools import Tool, ToolManager +from mcp.shared.exceptions import McpError +from mcp.shared.memory import create_connected_server_and_client_session +from mcp.types import INVALID_PARAMS, ContentBlock, ErrorData, TextContent + + +class TestImmediateResultValidation: + """Test validation of immediate_result parameter during tool registration.""" + + def test_immediate_result_with_sync_only_tool_fails(self): + """Test that immediate_result fails with sync-only tools.""" + + async def immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + def sync_tool() -> str: + """A sync tool.""" + return "sync" + + manager = ToolManager() + + # Should raise ValueError when immediate_result is used with sync-only tool + with pytest.raises(ValueError, match="immediate_result can only be used with async-compatible tools"): + manager.add_tool(sync_tool, invocation_modes=["sync"], immediate_result=immediate_fn) + + def test_immediate_result_with_async_tool_succeeds(self): + """Test that immediate_result 
succeeds with async-compatible tools.""" + + async def immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + async def async_tool() -> str: + """An async tool.""" + return "async" + + manager = ToolManager() + + # Should succeed with async-compatible tool + tool = manager.add_tool(async_tool, invocation_modes=["async"], immediate_result=immediate_fn) + assert tool.immediate_result == immediate_fn + assert tool.invocation_modes == ["async"] + + def test_immediate_result_with_hybrid_tool_succeeds(self): + """Test that immediate_result succeeds with hybrid sync/async tools.""" + + async def immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + def hybrid_tool() -> str: + """A hybrid tool.""" + return "hybrid" + + manager = ToolManager() + + # Should succeed with hybrid tool + tool = manager.add_tool(hybrid_tool, invocation_modes=["sync", "async"], immediate_result=immediate_fn) + assert tool.immediate_result == immediate_fn + assert tool.invocation_modes == ["sync", "async"] + + def test_immediate_result_non_async_callable_fails(self): + """Test that non-async immediate_result functions fail validation.""" + + def sync_immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + async def async_tool() -> str: + return "async" + + manager = ToolManager() + + # Should raise ValueError for non-async immediate_result function + with pytest.raises(ValueError, match="immediate_result must be an async callable"): + manager.add_tool(async_tool, invocation_modes=["async"], immediate_result=sync_immediate_fn) # type: ignore + + def test_immediate_result_non_callable_fails(self): + """Test that non-callable immediate_result fails validation.""" + + async def async_tool() -> str: + return "async" + + manager = ToolManager() + + # Should raise ValueError for non-callable immediate_result + with pytest.raises(ValueError, match="immediate_result must be an async callable"): + manager.add_tool(async_tool, invocation_modes=["async"], immediate_result="not_callable") # type: ignore + + def test_tool_from_function_immediate_result_validation(self): + """Test Tool.from_function validates immediate_result correctly.""" + + async def immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + def sync_tool() -> str: + return "sync" + + # Should fail with sync-only tool + with pytest.raises(ValueError, match="immediate_result can only be used with async-compatible tools"): + Tool.from_function(sync_tool, invocation_modes=["sync"], immediate_result=immediate_fn) + + # Should succeed with async tool + async def async_tool() -> str: + return "async" + + tool = Tool.from_function(async_tool, invocation_modes=["async"], immediate_result=immediate_fn) + assert tool.immediate_result == immediate_fn + + +class TestImmediateResultIntegration: + """Test integration of immediate_result with async operations and polling.""" + + @pytest.mark.anyio + async def test_fastmcp_tool_decorator_with_immediate_result(self): + """Test FastMCP tool decorator with immediate_result parameter.""" + + mcp = FastMCP() + + async def immediate_feedback(operation: str) -> list[ContentBlock]: + return [TextContent(type="text", text=f"🚀 Starting {operation}...")] + + @mcp.tool(invocation_modes=["async"], immediate_result=immediate_feedback) + async def long_running_task(operation: str) -> str: + """Perform a long-running task with immediate feedback.""" + await anyio.sleep(0.1) # Simulate work + 
return f"Task '{operation}' completed!" + + # Test with "next" protocol version to see async tools + async with create_connected_server_and_client_session(mcp._mcp_server, protocol_version="next") as client: + tools = await client.list_tools() + assert len(tools.tools) == 1 + assert tools.tools[0].name == "long_running_task" + assert tools.tools[0].invocationMode == "async" + + # Test that the tool has immediate_result in the internal representation + internal_tool = mcp._tool_manager.get_tool("long_running_task") + assert internal_tool is not None + assert internal_tool.immediate_result == immediate_feedback + + @pytest.mark.anyio + async def test_tool_without_immediate_result_backward_compatibility(self): + """Test that async tools without immediate_result work unchanged.""" + + mcp = FastMCP() + + @mcp.tool(invocation_modes=["async"]) + async def simple_async_tool(message: str) -> str: + """A simple async tool without immediate result.""" + await anyio.sleep(0.1) + return f"Processed: {message}" + + # Test with "next" protocol version to see async tools + async with create_connected_server_and_client_session(mcp._mcp_server, protocol_version="next") as client: + tools = await client.list_tools() + assert len(tools.tools) == 1 + assert tools.tools[0].name == "simple_async_tool" + assert tools.tools[0].invocationMode == "async" + + # Test that the tool has no immediate_result + internal_tool = mcp._tool_manager.get_tool("simple_async_tool") + assert internal_tool is not None + assert internal_tool.immediate_result is None + + @pytest.mark.anyio + async def test_sync_tool_unchanged_behavior(self): + """Test that sync tools continue to work without modification.""" + + mcp = FastMCP() + + @mcp.tool() + def sync_tool(message: str) -> str: + """A simple sync tool.""" + return f"Processed: {message}" + + # Test with old client (sync tools should be visible) + async with create_connected_server_and_client_session(mcp._mcp_server) as client: + tools = await client.list_tools() + assert len(tools.tools) == 1 + assert tools.tools[0].name == "sync_tool" + assert tools.tools[0].invocationMode is None # Old clients don't see invocationMode + + # Test with "next" protocol version + async with create_connected_server_and_client_session(mcp._mcp_server, protocol_version="next") as client: + tools = await client.list_tools() + assert len(tools.tools) == 1 + assert tools.tools[0].name == "sync_tool" + assert tools.tools[0].invocationMode == "sync" # New clients see invocationMode + + # Test that the tool has no immediate_result + internal_tool = mcp._tool_manager.get_tool("sync_tool") + assert internal_tool is not None + assert internal_tool.immediate_result is None + assert internal_tool.invocation_modes == ["sync"] + + @pytest.mark.anyio + async def test_multiple_tools_with_mixed_immediate_result(self): + """Test multiple tools with mixed immediate_result configurations.""" + + mcp = FastMCP() + + async def immediate_feedback(message: str) -> list[ContentBlock]: + return [TextContent(type="text", text=f"Processing: {message}")] + + @mcp.tool(invocation_modes=["async"], immediate_result=immediate_feedback) + async def tool_with_immediate(message: str) -> str: + return f"Done: {message}" + + @mcp.tool(invocation_modes=["async"]) + async def tool_without_immediate(message: str) -> str: + return f"Done: {message}" + + @mcp.tool() + def sync_tool(message: str) -> str: + return f"Done: {message}" + + # Test with old client (only sync tools visible) + async with 
create_connected_server_and_client_session(mcp._mcp_server) as client: + tools = await client.list_tools() + assert len(tools.tools) == 1 # Only sync tool visible + assert tools.tools[0].name == "sync_tool" + + # Test with "next" protocol version (all tools visible) + async with create_connected_server_and_client_session(mcp._mcp_server, protocol_version="next") as client: + tools = await client.list_tools() + assert len(tools.tools) == 3 + + tool_names = {tool.name for tool in tools.tools} + assert tool_names == {"tool_with_immediate", "tool_without_immediate", "sync_tool"} + + # Test internal representations + tool_with = mcp._tool_manager.get_tool("tool_with_immediate") + tool_without = mcp._tool_manager.get_tool("tool_without_immediate") + sync_tool_obj = mcp._tool_manager.get_tool("sync_tool") + + assert tool_with is not None and tool_with.immediate_result == immediate_feedback + assert tool_without is not None and tool_without.immediate_result is None + assert sync_tool_obj is not None and sync_tool_obj.immediate_result is None + + +class TestImmediateResultErrorHandling: + """Test error handling for immediate_result functionality.""" + + def test_registration_error_messages(self): + """Test that registration errors have clear messages.""" + + async def immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + def sync_tool() -> str: + return "sync" + + manager = ToolManager() + + # Test error message for sync-only tool + with pytest.raises(ValueError) as exc_info: + manager.add_tool(sync_tool, invocation_modes=["sync"], immediate_result=immediate_fn) + + error_msg = str(exc_info.value) + assert "immediate_result can only be used with async-compatible tools" in error_msg + assert "Add 'async' to invocation_modes" in error_msg + + def test_fastmcp_decorator_sync_tool_validation(self): + """Test that FastMCP decorator prevents sync tools from using immediate_result.""" + + mcp = FastMCP() + + async def immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + # Should raise ValueError when decorating sync tool with immediate_result + with pytest.raises(ValueError, match="immediate_result can only be used with async-compatible tools"): + + @mcp.tool(invocation_modes=["sync"], immediate_result=immediate_fn) + def sync_tool_with_immediate() -> str: + return "sync" + + def test_default_sync_tool_validation(self): + """Test that default sync tools (no invocation_modes specified) cannot use immediate_result.""" + + mcp = FastMCP() + + async def immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + # Should raise ValueError when decorating default sync tool with immediate_result + with pytest.raises(ValueError, match="immediate_result can only be used with async-compatible tools"): + + @mcp.tool(immediate_result=immediate_fn) + def default_sync_tool() -> str: + return "sync" + + def test_non_async_callable_error_message(self): + """Test error message for non-async immediate_result function.""" + + def sync_immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + async def async_tool() -> str: + return "async" + + manager = ToolManager() + + with pytest.raises(ValueError) as exc_info: + manager.add_tool(async_tool, invocation_modes=["async"], immediate_result=sync_immediate_fn) # type: ignore + + error_msg = str(exc_info.value) + assert "immediate_result must be an async callable" in error_msg + + def 
test_tool_manager_duplicate_tool_handling_with_immediate_result(self): + """Test duplicate tool handling when immediate_result is involved.""" + + async def immediate_fn1() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate1")] + + async def immediate_fn2() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate2")] + + async def async_tool() -> str: + return "async" + + manager = ToolManager() + + # Add first tool with immediate_result + tool1 = manager.add_tool( + async_tool, name="test_tool", invocation_modes=["async"], immediate_result=immediate_fn1 + ) + + # Add duplicate tool with different immediate_result (should return existing) + tool2 = manager.add_tool( + async_tool, name="test_tool", invocation_modes=["async"], immediate_result=immediate_fn2 + ) + + # Should return the same tool (first one registered) + assert tool1 is tool2 + assert tool1.immediate_result == immediate_fn1 + + +class TestImmediateResultPerformance: + """Test performance aspects of immediate_result functionality.""" + + def test_no_performance_impact_without_immediate_result(self): + """Test that tools without immediate_result have no performance impact.""" + + async def async_tool() -> str: + return "async" + + manager = ToolManager() + + # Add tool without immediate_result + tool = manager.add_tool(async_tool, invocation_modes=["async"]) + + # Verify no immediate_result overhead + assert tool.immediate_result is None + assert "async" in tool.invocation_modes + + @pytest.mark.anyio + async def test_immediate_result_function_isolation(self): + """Test that immediate_result functions are isolated from main tool execution.""" + + execution_order: list[str] = [] + + async def immediate_fn(message: str) -> list[ContentBlock]: + execution_order.append("immediate") + return [TextContent(type="text", text=f"Processing: {message}")] + + async def async_tool(message: str) -> str: + execution_order.append("main") + await anyio.sleep(0.1) + return f"Completed: {message}" + + manager = ToolManager() + tool = manager.add_tool(async_tool, invocation_modes=["async"], immediate_result=immediate_fn) + + # Test that immediate function can be called independently + await immediate_fn("test") + assert execution_order == ["immediate"] + + # Reset and test main function + execution_order.clear() + await tool.run({"message": "test"}) + assert execution_order == ["main"] + + +class TestImmediateResultRuntimeErrors: + """Test runtime error handling when immediate_result functions raise exceptions.""" + + @pytest.mark.anyio + async def test_immediate_result_registration_and_storage(self): + """Test that immediate_result functions are properly registered, stored, and executed.""" + + async def working_immediate_fn(message: str) -> list[ContentBlock]: + return [TextContent(type="text", text=f"Processing: {message}")] + + async def async_tool(message: str) -> str: + await anyio.sleep(0.1) + return f"Completed: {message}" + + mcp = FastMCP() + + @mcp.tool(invocation_modes=["async"], immediate_result=working_immediate_fn) + async def tool_with_working_immediate(message: str) -> str: + """Tool with working immediate result.""" + return await async_tool(message) + + # Verify the tool was registered with immediate_result + internal_tool = mcp._tool_manager.get_tool("tool_with_working_immediate") + assert internal_tool is not None + assert internal_tool.immediate_result == working_immediate_fn + + # Test with "next" protocol version to enable async tools + async with 
create_connected_server_and_client_session(mcp._mcp_server, protocol_version="next") as client: + # Call the tool - should return operation token + result = await client.call_tool("tool_with_working_immediate", {"message": "test"}) + + # Should get operation token for async call + assert result.operation is not None + token = result.operation.token + + # The immediate result should be in the initial response content + assert len(result.content) == 1 + content = result.content[0] + assert content.type == "text" + assert content.text == "Processing: test" + + # Poll for completion to verify main tool execution + while True: + status = await client.get_operation_status(token) + if status.status == "completed": + final_result = await client.get_operation_result(token) + assert not final_result.result.isError + assert len(final_result.result.content) == 1 + final_content = final_result.result.content[0] + assert final_content.type == "text" + assert final_content.text == "Completed: test" + break + elif status.status == "failed": + pytest.fail(f"Tool execution failed: {status}") + await anyio.sleep(0.01) + + @pytest.mark.anyio + async def test_immediate_result_exception_handling(self): + """Test that exceptions in immediate_result are properly handled during tool execution.""" + + async def failing_immediate_fn(message: str) -> list[ContentBlock]: + raise ValueError(f"Immediate result failed for: {message}") + + async def async_tool(message: str) -> str: + await anyio.sleep(0.1) + return f"Completed: {message}" + + mcp = FastMCP() + + @mcp.tool(invocation_modes=["async"], immediate_result=failing_immediate_fn) + async def tool_with_failing_immediate(message: str) -> str: + """Tool with failing immediate result.""" + return await async_tool(message) + + # Verify the tool was registered with the failing immediate_result + internal_tool = mcp._tool_manager.get_tool("tool_with_failing_immediate") + assert internal_tool is not None + assert internal_tool.immediate_result == failing_immediate_fn + + # Test with "next" protocol version to enable async tools + async with create_connected_server_and_client_session(mcp._mcp_server, protocol_version="next") as client: + # The call should return an error result due to immediate_result exception + result = await client.call_tool("tool_with_failing_immediate", {"message": "test"}) + + # Verify error result + assert result.isError is True + assert result.operation is None # No operation created due to immediate_result failure + assert len(result.content) == 1 + content = result.content[0] + assert content.type == "text" + assert "Immediate result execution error" in content.text + assert "Immediate result failed for: test" in content.text + + @pytest.mark.anyio + async def test_immediate_result_invalid_return_type_error(self): + """Test that immediate_result returning invalid type is handled properly.""" + + async def invalid_return_immediate_fn(message: str) -> str: # Wrong return type + return f"Invalid return: {message}" # Should return list[ContentBlock] + + async def async_tool(message: str) -> str: + return f"Completed: {message}" + + mcp = FastMCP() + + @mcp.tool(invocation_modes=["async"], immediate_result=invalid_return_immediate_fn) # type: ignore + async def tool_with_invalid_immediate(message: str) -> str: + """Tool with invalid immediate result return type.""" + return await async_tool(message) + + # Verify the tool was registered (type checking is not enforced at runtime) + internal_tool = 
mcp._tool_manager.get_tool("tool_with_invalid_immediate") + assert internal_tool is not None + assert internal_tool.immediate_result == invalid_return_immediate_fn + + # Test with "next" protocol version to enable async tools + async with create_connected_server_and_client_session(mcp._mcp_server, protocol_version="next") as client: + # The call should return an error result due to invalid return type + result = await client.call_tool("tool_with_invalid_immediate", {"message": "test"}) + + # Verify error result + assert result.isError is True + assert result.operation is None # No operation created due to immediate_result failure + assert len(result.content) == 1 + content = result.content[0] + assert content.type == "text" + assert "Immediate result execution error" in content.text + assert "immediate_result must return list[ContentBlock]" in content.text + + @pytest.mark.anyio + async def test_immediate_result_async_exception_handling(self): + """Test that async exceptions in immediate_result are properly handled.""" + + async def async_failing_immediate_fn(operation: str) -> list[ContentBlock]: + await anyio.sleep(0.01) # Make it truly async + raise RuntimeError(f"Async immediate failure: {operation}") + + async def async_tool(operation: str) -> str: + await anyio.sleep(0.1) + return f"Operation {operation} completed" + + mcp = FastMCP() + + @mcp.tool(invocation_modes=["async"], immediate_result=async_failing_immediate_fn) + async def tool_with_async_failing_immediate(operation: str) -> str: + """Tool with async failing immediate result.""" + return await async_tool(operation) + + # Verify the tool was registered + internal_tool = mcp._tool_manager.get_tool("tool_with_async_failing_immediate") + assert internal_tool is not None + assert internal_tool.immediate_result == async_failing_immediate_fn + + # Test with "next" protocol version to enable async tools + async with create_connected_server_and_client_session(mcp._mcp_server, protocol_version="next") as client: + # The call should return an error result due to immediate_result exception + result = await client.call_tool("tool_with_async_failing_immediate", {"operation": "test_op"}) + + # Verify error result + assert result.isError is True + assert result.operation is None # No operation created due to immediate_result failure + assert len(result.content) == 1 + content = result.content[0] + assert content.type == "text" + assert "Immediate result execution error" in content.text + assert "Async immediate failure: test_op" in content.text + + @pytest.mark.anyio + async def test_immediate_result_error_prevents_main_tool_execution(self): + """Test that immediate_result errors prevent the main tool from executing. + + When immediate_result fails, no async operation should be created and the main + tool function should not be executed. 
+ """ + + call_count = 0 + + async def failing_immediate_fn(message: str) -> list[ContentBlock]: + raise ValueError("Immediate failed") + + async def async_tool(message: str) -> str: + nonlocal call_count + call_count += 1 + return f"Tool executed: {message}" + + mcp = FastMCP() + + @mcp.tool(invocation_modes=["async"], immediate_result=failing_immediate_fn) + async def tool_with_failing_immediate(message: str) -> str: + """Tool with failing immediate result.""" + return await async_tool(message) + + # Verify the tool was registered + internal_tool = mcp._tool_manager.get_tool("tool_with_failing_immediate") + assert internal_tool is not None + assert internal_tool.immediate_result == failing_immediate_fn + + # Test with "next" protocol version to enable async tools + async with create_connected_server_and_client_session(mcp._mcp_server, protocol_version="next") as client: + # The call should return an error result due to immediate_result exception + result = await client.call_tool("tool_with_failing_immediate", {"message": "test"}) + + # Verify error result + assert result.isError is True + assert result.operation is None # No operation created due to immediate_result failure + assert len(result.content) == 1 + content = result.content[0] + assert content.type == "text" + assert "Immediate result execution error" in content.text + assert "Immediate failed" in content.text + + # Verify main tool was NOT executed due to immediate_result failure + assert call_count == 0 + + @pytest.mark.anyio + async def test_immediate_result_mcp_error_passthrough(self): + """Test that McpError from immediate_result is passed through with original error details.""" + + async def mcp_error_immediate_fn(message: str) -> list[ContentBlock]: + raise McpError(ErrorData(code=INVALID_PARAMS, message=f"Custom MCP error: {message}")) + + async def async_tool(message: str) -> str: + return f"Completed: {message}" + + mcp = FastMCP() + + @mcp.tool(invocation_modes=["async"], immediate_result=mcp_error_immediate_fn) + async def tool_with_mcp_error_immediate(message: str) -> str: + """Tool with immediate result that raises McpError.""" + return await async_tool(message) + + # Verify the tool was registered + internal_tool = mcp._tool_manager.get_tool("tool_with_mcp_error_immediate") + assert internal_tool is not None + assert internal_tool.immediate_result == mcp_error_immediate_fn + + # Test with "next" protocol version to enable async tools + async with create_connected_server_and_client_session(mcp._mcp_server, protocol_version="next") as client: + # The call should return an error result with the original McpError details + result = await client.call_tool("tool_with_mcp_error_immediate", {"message": "test"}) + + # Verify error result preserves the original McpError + assert result.isError is True + assert result.operation is None # No operation created due to immediate_result failure + assert len(result.content) == 1 + content = result.content[0] + assert content.type == "text" + # The original McpError should be preserved, not wrapped in "Immediate result execution failed" + assert "Custom MCP error: test" in content.text + + @pytest.mark.anyio + async def test_generic_exception_wrapped_in_mcp_error(self): + """Test that generic exceptions from immediate_result are wrapped in McpError with INTERNAL_ERROR code.""" + + async def failing_immediate_fn(message: str) -> list[ContentBlock]: + raise ValueError(f"Generic error: {message}") + + async def async_tool(message: str) -> str: + return f"Completed: {message}" + + mcp 
= FastMCP() + + @mcp.tool(invocation_modes=["async"], immediate_result=failing_immediate_fn) + async def tool_with_failing_immediate(message: str) -> str: + """Tool with failing immediate result.""" + return await async_tool(message) + + # Test with "next" protocol version to enable async tools + async with create_connected_server_and_client_session(mcp._mcp_server, protocol_version="next") as client: + # The call should return an error result with wrapped exception + result = await client.call_tool("tool_with_failing_immediate", {"message": "test"}) + + # Verify error result wraps the exception + assert result.isError is True + assert result.operation is None # No operation created due to immediate_result failure + assert len(result.content) == 1 + content = result.content[0] + assert content.type == "text" + assert "Immediate result execution error" in content.text + assert "Generic error: test" in content.text + + +class TestImmediateResultMetadata: + """Test metadata handling for immediate_result functionality.""" + + def test_immediate_result_stored_in_tool_object(self): + """Test that immediate_result function is stored in Tool object.""" + + async def immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + async def async_tool() -> str: + return "async" + + manager = ToolManager() + tool = manager.add_tool(async_tool, invocation_modes=["async"], immediate_result=immediate_fn) + + # Verify immediate_result is stored in the Tool object + assert tool.immediate_result == immediate_fn + assert callable(tool.immediate_result) + + def test_tool_meta_field_preservation(self): + """Test that existing meta field is preserved when immediate_result is added.""" + + async def immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + async def async_tool() -> str: + return "async" + + manager = ToolManager() + + # Add tool with both meta and immediate_result + custom_meta = {"custom_key": "custom_value"} + tool = manager.add_tool(async_tool, invocation_modes=["async"], immediate_result=immediate_fn, meta=custom_meta) + + # Verify both meta and immediate_result are preserved + assert tool.immediate_result == immediate_fn + assert tool.meta is not None + assert tool.meta["custom_key"] == "custom_value" + + def test_keep_alive_and_immediate_result_compatibility(self): + """Test that keep_alive and immediate_result work together.""" + + async def immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + async def async_tool() -> str: + return "async" + + manager = ToolManager() + + # Add tool with both keep_alive and immediate_result + tool = manager.add_tool(async_tool, invocation_modes=["async"], immediate_result=immediate_fn, keep_alive=1800) + + # Verify both are set correctly + assert tool.immediate_result == immediate_fn + assert tool.meta is not None + assert tool.meta["_keep_alive"] == 1800 + # immediate_result is no longer stored in meta, it's a direct field on the Tool object + + def test_immediate_result_stored_as_direct_field(self): + """Test that immediate_result function is stored as a direct field on the Tool object.""" + + async def immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + async def async_tool() -> str: + return "async" + + manager = ToolManager() + tool = manager.add_tool(async_tool, invocation_modes=["async"], immediate_result=immediate_fn) + + # Verify immediate_result is stored as a direct field on the Tool 
object + assert tool.immediate_result == immediate_fn + assert callable(tool.immediate_result) + # immediate_result is no longer stored in meta field diff --git a/tests/server/fastmcp/test_integration.py b/tests/server/fastmcp/test_integration.py index dc88cc025..e7b2e670e 100644 --- a/tests/server/fastmcp/test_integration.py +++ b/tests/server/fastmcp/test_integration.py @@ -11,17 +11,24 @@ # pyright: reportUnknownArgumentType=false import json +import logging import multiprocessing import socket import time from collections.abc import Generator +import anyio import pytest import uvicorn from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream from pydantic import AnyUrl from examples.snippets.servers import ( + async_tool_basic, + async_tool_elicitation, + async_tool_immediate, + async_tool_progress, + async_tool_sampling, basic_prompt, basic_resource, basic_tool, @@ -61,6 +68,8 @@ ToolListChangedNotification, ) +logger = logging.getLogger(__name__) + class NotificationCollector: """Collects notifications from the server for testing.""" @@ -104,7 +113,17 @@ def server_url(server_port: int) -> str: def run_server_with_transport(module_name: str, port: int, transport: str) -> None: """Run server with specified transport.""" # Get the MCP instance based on module name - if module_name == "basic_tool": + if module_name == "async_tool_basic": + mcp = async_tool_basic.mcp + elif module_name == "async_tool_elicitation": + mcp = async_tool_elicitation.mcp + elif module_name == "async_tool_immediate": + mcp = async_tool_immediate.mcp + elif module_name == "async_tool_progress": + mcp = async_tool_progress.mcp + elif module_name == "async_tool_sampling": + mcp = async_tool_sampling.mcp + elif module_name == "basic_tool": mcp = basic_tool.mcp elif module_name == "basic_resource": mcp = basic_resource.mcp @@ -244,6 +263,12 @@ async def elicitation_callback(context: RequestContext[ClientSession, None], par action="accept", content={"checkAlternative": True, "alternativeDate": "2024-12-26"}, ) + # For async elicitation tool test + elif "data_migration" in params.message: + return ElicitResult( + action="accept", + content={"continue_processing": True, "priority_level": "high"}, + ) else: return ElicitResult(action="decline") @@ -663,6 +688,186 @@ async def test_fastmcp_quickstart(server_transport: str, server_url: str) -> Non assert resource_result.contents[0].text == "Hello, Alice!" 
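+
+
+# The async-tool tests below share one client-side pattern: call_tool returns an
+# operation token, the client polls get_operation_status until the operation
+# reaches "completed" or "failed", then fetches the payload with
+# get_operation_result. A sketch of that loop as a reusable helper (the tests
+# that follow keep the loop inline so each scenario reads on its own):
+async def _wait_for_operation_result(session: ClientSession, token: str, max_attempts: int = 20):
+    """Poll an async operation, failing the test on error or timeout."""
+    for _ in range(max_attempts):
+        status = await session.get_operation_status(token)
+        if status.status == "completed":
+            return await session.get_operation_result(token)
+        if status.status == "failed":
+            pytest.fail(f"Async operation failed: {status.error}")
+        await anyio.sleep(0.5)
+    pytest.fail("Async operation timed out")
+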
+# Test async tools example with "next" protocol
+@pytest.mark.anyio
+@pytest.mark.parametrize(
+    "server_transport",
+    [
+        # Skip SSE for async tools - SSE client has issues with long polling in test environment
+        # causing BrokenResourceError during async operation status polling
+        # ("async_tool_basic", "sse"),
+        ("async_tool_basic", "streamable-http"),
+    ],
+    indirect=True,
+)
+async def test_async_tool_basic(server_transport: str, server_url: str) -> None:
+    """Test async tools functionality with 'next' protocol version."""
+    transport = server_transport
+    client_cm = create_client_for_transport(transport, server_url)
+
+    async with client_cm as client_streams:
+        read_stream, write_stream = unpack_streams(client_streams)
+        async with ClientSession(read_stream, write_stream, protocol_version="next") as session:
+            # Test initialization
+            result = await session.initialize()
+            assert isinstance(result, InitializeResult)
+            assert result.serverInfo.name == "Async Tool Basic"
+
+            # Test sync tool (should work normally)
+            sync_result = await session.call_tool("process_text_sync", {"text": "hello"})
+            assert len(sync_result.content) == 1
+            assert isinstance(sync_result.content[0], TextContent)
+            assert sync_result.content[0].text == "Processed: HELLO"
+
+            # Test async-only tool (should return operation token)
+            async_result = await session.call_tool("analyze_data", {"dataset": "test data"})
+            assert async_result.operation is not None
+            token = async_result.operation.token
+
+            # Poll for completion with timeout
+            max_attempts = 20
+            attempt = 0
+            while attempt < max_attempts:
+                status = await session.get_operation_status(token)
+                if status.status == "completed":
+                    final_result = await session.get_operation_result(token)
+                    assert not final_result.result.isError
+                    assert len(final_result.result.content) == 1
+                    content = final_result.result.content[0]
+                    assert isinstance(content, TextContent)
+                    assert "Analysis results for test data: 95% accuracy achieved" in content.text
+                    break
+                elif status.status == "failed":
+                    pytest.fail(f"Async operation failed: {status.error}")
+
+                attempt += 1
+                await anyio.sleep(0.5)
+            else:
+                pytest.fail("Async operation timed out")
+
+            # Test hybrid tool (a "next" protocol client invokes process_text asynchronously)
+            hybrid_result = await session.call_tool("process_text", {"text": "world"})
+            assert hybrid_result.operation is not None
+            token = hybrid_result.operation.token
+
+            # Poll for completion with timeout
+            max_attempts = 20
+            attempt = 0
+            while attempt < max_attempts:
+                status = await session.get_operation_status(token)
+                if status.status == "completed":
+                    final_hybrid_result = await session.get_operation_result(token)
+                    assert not final_hybrid_result.result.isError
+                    assert len(final_hybrid_result.result.content) == 1
+                    assert isinstance(final_hybrid_result.result.content[0], TextContent)
+                    assert "Processed: WORLD" in final_hybrid_result.result.content[0].text
+                    break
+                elif status.status == "failed":
+                    pytest.fail(f"Async operation failed: {status.error}")
+
+                attempt += 1
+                await anyio.sleep(0.5)
+            else:
+                pytest.fail("Async operation timed out")
+
+
+# Test async tools example with legacy protocol
+@pytest.mark.anyio
+@pytest.mark.parametrize(
+    "server_transport",
+    [
+        # ("async_tool_basic", "sse"),
+        ("async_tool_basic", "streamable-http"),
+    ],
+    indirect=True,
+)
+async def test_async_tool_basic_legacy_protocol(server_transport: str, server_url: str) -> None:
+    """Test async tools functionality with '2025-06-18' protocol version."""
+    transport = server_transport
+    client_cm = create_client_for_transport(transport, server_url)
+
+    async with client_cm as client_streams:
+        read_stream, write_stream = unpack_streams(client_streams)
+        async with ClientSession(read_stream, write_stream, protocol_version="2025-06-18") as session:
+            # Test initialization
+            result = await session.initialize()
+            assert isinstance(result, InitializeResult)
+            assert result.serverInfo.name == "Async Tool Basic"
+
+            # Test sync tool (should work normally)
+            sync_result = await session.call_tool("process_text_sync", {"text": "hello"})
+            assert len(sync_result.content) == 1
+            assert isinstance(sync_result.content[0], TextContent)
+            assert sync_result.content[0].text == "Processed: HELLO"
+
+            # Test async-only tool (executes synchronously with legacy protocol)
+            async_result = await session.call_tool("analyze_data", {"dataset": "test data"})
+            assert async_result.operation is None  # No operation token with legacy protocol
+            assert len(async_result.content) == 1
+            content = async_result.content[0]
+            assert isinstance(content, TextContent)
+            assert "Analysis results for test data: 95% accuracy achieved" in content.text
+
+            # Test hybrid tool (should work as sync)
+            hybrid_result = await session.call_tool("process_text", {"text": "hello"})
+            assert len(hybrid_result.content) == 1
+            assert isinstance(hybrid_result.content[0], TextContent)
+            assert "Processed: HELLO" in hybrid_result.content[0].text
+
+
+@pytest.mark.anyio
+@pytest.mark.parametrize(
+    "server_transport",
+    [
+        # ("async_tool_basic", "sse"),
+        ("async_tool_basic", "streamable-http"),
+    ],
+    indirect=True,
+)
+async def test_async_tool_reconnection(server_transport: str, server_url: str) -> None:
+    """Test that async operations can be retrieved after reconnecting with a new session."""
+    transport = server_transport
+    client_cm1 = create_client_for_transport(transport, server_url)
+
+    # Start async operation in first session
+    async with client_cm1 as client_streams:
+        read_stream, write_stream = unpack_streams(client_streams)
+        async with ClientSession(read_stream, write_stream, protocol_version="next") as session1:
+            await session1.initialize()
+
+            # Start async operation
+            result = await session1.call_tool("process_text", {"text": "test data"})
+            assert result.operation is not None
+            token = result.operation.token
+
+    # Reconnect with new session and retrieve result
+    client_cm2 = create_client_for_transport(transport, server_url)
+    async with client_cm2 as client_streams:
+        read_stream, write_stream = unpack_streams(client_streams)
+        async with ClientSession(read_stream, write_stream, protocol_version="next") as session2:
+            await session2.initialize()
+
+            # Poll for completion in new session
+            max_attempts = 20
+            attempt = 0
+            while attempt < max_attempts:
+                status = await session2.get_operation_status(token)
+                if status.status == "completed":
+                    final_result = await session2.get_operation_result(token)
+                    assert not final_result.result.isError
+                    assert len(final_result.result.content) == 1
+                    content = final_result.result.content[0]
+                    assert isinstance(content, TextContent)
+                    break
+                elif status.status == "failed":
+                    pytest.fail(f"Operation failed: {status.error}")
+
+                attempt += 1
+                await anyio.sleep(0.5)
+            else:
+                pytest.fail("Async operation timed out")
+
+
+# Test structured output example
+@pytest.mark.anyio
+@pytest.mark.parametrize(
     "server_transport",
     [
         ("structured_output", "sse"),
         ("structured_output", "streamable-http"),
     ],
     indirect=True,
@@ -697,3 +902,331 @@ async def test_structured_output(server_transport: str, server_url: str) -> None
     assert "sunny" in result_text  # condition
     assert "45" in result_text 
# humidity assert "5.2" in result_text # wind_speed + + +# Test immediate_result functionality integration +@pytest.mark.anyio +@pytest.mark.parametrize( + "server_transport", + [ + ("async_tool_immediate", "sse"), + ("async_tool_immediate", "streamable-http"), + ], + indirect=True, +) +async def test_immediate_result_integration(server_transport: str, server_url: str) -> None: + """Test complete flow from tool registration to immediate result execution.""" + transport = server_transport + client_cm = create_client_for_transport(transport, server_url) + + async with client_cm as client_streams: + read_stream, write_stream = unpack_streams(client_streams) + async with ClientSession(read_stream, write_stream, protocol_version="next") as session: + # Test initialization + result = await session.initialize() + assert isinstance(result, InitializeResult) + assert result.serverInfo.name == "Async Tool Immediate" + + # Test tool with immediate_result + immediate_result = await session.call_tool("long_analysis", {"operation": "data_processing"}) + + # Verify immediate result is returned in content + assert len(immediate_result.content) == 1 + assert isinstance(immediate_result.content[0], TextContent) + assert "Starting data_processing operation. This will take a moment." in immediate_result.content[0].text + + # Verify async operation is created + assert immediate_result.operation is not None + token = immediate_result.operation.token + + # Poll for final result with timeout + max_attempts = 20 + attempt = 0 + while attempt < max_attempts: + status = await session.get_operation_status(token) + if status.status == "completed": + final_result = await session.get_operation_result(token) + assert not final_result.result.isError + assert len(final_result.result.content) == 1 + content = final_result.result.content[0] + assert isinstance(content, TextContent) + assert "Analysis 'data_processing' completed with detailed results" in content.text + break + elif status.status == "failed": + pytest.fail(f"Async operation failed: {status.error}") + + attempt += 1 + await anyio.sleep(0.5) + else: + pytest.fail("Async operation timed out") + + +@pytest.mark.anyio +@pytest.mark.parametrize( + "server_transport", + [ + ("async_tool_basic", "sse"), + ("async_tool_basic", "streamable-http"), + ], + indirect=True, +) +async def test_immediate_result_backward_compatibility(server_transport: str, server_url: str) -> None: + """Test that existing async tools without immediate_result work unchanged.""" + transport = server_transport + client_cm = create_client_for_transport(transport, server_url) + + async with client_cm as client_streams: + read_stream, write_stream = unpack_streams(client_streams) + async with ClientSession(read_stream, write_stream, protocol_version="next") as session: + # Test initialization + result = await session.initialize() + assert isinstance(result, InitializeResult) + assert result.serverInfo.name == "Async Tool Basic" + + # Test async tool without immediate_result (should have empty content initially) + async_result = await session.call_tool("analyze_data", {"dataset": "test_data"}) + + # Should have empty content array (no immediate result) + assert len(async_result.content) == 0 + + # Should still have async operation + assert async_result.operation is not None + token = async_result.operation.token + + # Poll for final result with timeout + max_attempts = 20 + attempt = 0 + while attempt < max_attempts: + status = await session.get_operation_status(token) + if status.status == 
"completed": + final_result = await session.get_operation_result(token) + assert not final_result.result.isError + assert len(final_result.result.content) == 1 + content = final_result.result.content[0] + assert isinstance(content, TextContent) + assert "Analysis results for test_data: 95% accuracy achieved" in content.text + break + elif status.status == "failed": + pytest.fail(f"Async operation failed: {status.error}") + + attempt += 1 + await anyio.sleep(0.5) + else: + pytest.fail("Async operation timed out") + + +# Test async progress notifications +@pytest.mark.anyio +@pytest.mark.parametrize( + "server_transport", + [ + ("async_tool_progress", "sse"), + ("async_tool_progress", "streamable-http"), + ], + indirect=True, +) +async def test_async_tool_progress(server_transport: str, server_url: str) -> None: + """Test async tools with progress notifications.""" + transport = server_transport + collector = NotificationCollector() + + async def message_handler(message: RequestResponder[ServerRequest, ClientResult] | ServerNotification | Exception): + await collector.handle_generic_notification(message) + if isinstance(message, Exception): + raise message + + client_cm = create_client_for_transport(transport, server_url) + + async with client_cm as client_streams: + read_stream, write_stream = unpack_streams(client_streams) + async with ClientSession( + read_stream, write_stream, protocol_version="next", message_handler=message_handler + ) as session: + # Test initialization + result = await session.initialize() + assert isinstance(result, InitializeResult) + assert result.serverInfo.name == "Async Tool Progress" + + # Test batch processing with progress + progress_updates = [] + + async def progress_callback(progress: float, total: float | None, message: str | None) -> None: + progress_updates.append((progress, total, message)) + + batch_result = await session.call_tool( + "batch_process", + {"items": ["apple", "banana", "cherry"]}, + progress_callback=progress_callback, + ) + assert batch_result.operation is not None + token = batch_result.operation.token + + # Poll for completion + max_attempts = 20 + attempt = 0 + while attempt < max_attempts: + status = await session.get_operation_status(token) + if status.status == "completed": + final_result = await session.get_operation_result(token) + assert not final_result.result.isError + + # Check structured content + if final_result.result.structuredContent: + assert isinstance(final_result.result.structuredContent, dict) + assert "result" in final_result.result.structuredContent + processed_items = final_result.result.structuredContent["result"] + assert len(processed_items) == 3 + assert all("PROCESSED_" in item for item in processed_items) + break + elif status.status == "failed": + pytest.fail(f"Batch operation failed: {status.error}") + + attempt += 1 + await anyio.sleep(0.3) + else: + pytest.fail("Batch operation timed out") + + # Verify progress updates were received + assert len(progress_updates) == 3 + for i, (progress, total, message) in enumerate(progress_updates): + expected_progress = (i + 1) / 3 + assert abs(progress - expected_progress) < 0.01 + assert total == 1.0 + assert f"Processed {i + 1}/3" in message + + +# Test async elicitation +@pytest.mark.anyio +@pytest.mark.parametrize( + "server_transport", + [ + ("async_tool_elicitation", "streamable-http"), # Only test streamable-http for elicitation + ], + indirect=True, +) +async def test_async_tool_elicitation(server_transport: str, server_url: str) -> None: + """Test async 
tools with elicitation.""" + transport = server_transport + client_cm = create_client_for_transport(transport, server_url) + + async def test_elicitation_callback(context: RequestContext[ClientSession, None], params: ElicitRequestParams): + """Handle elicitation requests from the server.""" + if "data_migration" in params.message: + return ElicitResult( + action="accept", + content={"continue_processing": True, "priority_level": "high"}, + ) + elif "file operation" in params.message.lower(): + return ElicitResult( + action="accept", + content={"confirm_operation": True, "backup_first": True}, + ) + else: + return ElicitResult(action="decline") + + async with client_cm as client_streams: + read_stream, write_stream = unpack_streams(client_streams) + async with ClientSession( + read_stream, + write_stream, + protocol_version="next", + elicitation_callback=test_elicitation_callback, + ) as session: + # Test initialization + result = await session.initialize() + assert isinstance(result, InitializeResult) + assert result.serverInfo.name == "Async Tool Elicitation" + + # Test process with confirmation + elicit_result = await session.call_tool("process_with_confirmation", {"operation": "data_migration"}) + assert elicit_result.operation is not None + token = elicit_result.operation.token + + # Poll for completion + max_attempts = 20 + attempt = 0 + while attempt < max_attempts: + status = await session.get_operation_status(token) + if status.status == "completed": + final_result = await session.get_operation_result(token) + assert not final_result.result.isError + assert len(final_result.result.content) == 1 + content = final_result.result.content[0] + assert isinstance(content, TextContent) + assert "Operation 'data_migration' completed successfully with high priority" in content.text + break + elif status.status == "failed": + pytest.fail(f"Elicitation operation failed: {status.error}") + + attempt += 1 + await anyio.sleep(0.3) + else: + pytest.fail("Elicitation operation timed out") + + +# Test async sampling +@pytest.mark.anyio +@pytest.mark.parametrize( + "server_transport", + [ + ("async_tool_sampling", "sse"), + ("async_tool_sampling", "streamable-http"), + ], + indirect=True, +) +async def test_async_tool_sampling(server_transport: str, server_url: str) -> None: + """Test async tools with sampling (LLM interaction).""" + transport = server_transport + client_cm = create_client_for_transport(transport, server_url) + + async def test_sampling_callback( + context: RequestContext[ClientSession, None], params: CreateMessageRequestParams + ) -> CreateMessageResult: + """Handle sampling requests from the server.""" + return CreateMessageResult( + role="assistant", + content=TextContent(type="text", text="This is a simulated LLM response for testing"), + model="test-model", + ) + + async with client_cm as client_streams: + read_stream, write_stream = unpack_streams(client_streams) + async with ClientSession( + read_stream, + write_stream, + protocol_version="next", + sampling_callback=test_sampling_callback, + ) as session: + # Test initialization + result = await session.initialize() + assert isinstance(result, InitializeResult) + assert result.serverInfo.name == "Async Tool Sampling" + + # Test content generation + sampling_result = await session.call_tool( + "generate_content", {"topic": "artificial intelligence", "content_type": "poem"} + ) + assert sampling_result.operation is not None + token = sampling_result.operation.token + + # Poll for completion + max_attempts = 20 + attempt = 0 + 
while attempt < max_attempts:
+                status = await session.get_operation_status(token)
+                if status.status == "completed":
+                    final_result = await session.get_operation_result(token)
+                    assert not final_result.result.isError
+                    assert len(final_result.result.content) == 1
+                    content = final_result.result.content[0]
+                    assert isinstance(content, TextContent)
+                    assert "Generated poem about 'artificial intelligence'" in content.text
+                    assert "This is a simulated LLM response" in content.text
+                    break
+                elif status.status == "failed":
+                    pytest.fail(f"Sampling operation failed: {status.error}")
+
+                attempt += 1
+                await anyio.sleep(0.3)
+            else:
+                pytest.fail("Sampling operation timed out")
diff --git a/tests/server/fastmcp/test_server.py b/tests/server/fastmcp/test_server.py
index 8caa3b1f6..c72b0eddf 100644
--- a/tests/server/fastmcp/test_server.py
+++ b/tests/server/fastmcp/test_server.py
@@ -3,6 +3,7 @@
 from typing import TYPE_CHECKING, Any
 from unittest.mock import patch
 
+import anyio
 import pytest
 from pydantic import AnyUrl, BaseModel
 from starlette.routing import Mount, Route
@@ -677,6 +678,270 @@ async def test_remove_tool_and_call(self):
         assert isinstance(content, TextContent)
         assert "Unknown tool" in content.text
 
+    @pytest.mark.anyio
+    async def test_list_tools_invocation_mode_sync(self):
+        """Test that sync tools omit the invocationMode field for backward compatibility."""
+        mcp = FastMCP()
+
+        @mcp.tool()
+        def sync_tool(x: int) -> int:
+            """A sync tool."""
+            return x * 2
+
+        async with client_session(mcp._mcp_server) as client:
+            tools = await client.list_tools()
+            tool = next(t for t in tools.tools if t.name == "sync_tool")
+            # Sync tools should not have invocationMode field (None) for old clients
+            assert tool.invocationMode is None
+
+    @pytest.mark.anyio
+    async def test_list_tools_invocation_mode_async_only(self):
+        """Test that async-only tools are hidden from clients on older protocol versions."""
+        mcp = FastMCP()
+
+        @mcp.tool(invocation_modes=["async"])
+        async def async_only_tool(x: int) -> int:
+            """An async-only tool."""
+            return x * 2
+
+        async with client_session(mcp._mcp_server) as client:
+            tools = await client.list_tools()
+            # Async-only tools should be filtered out for old clients
+            async_tools = [t for t in tools.tools if t.name == "async_only_tool"]
+            assert len(async_tools) == 0
+
+    @pytest.mark.anyio
+    async def test_list_tools_invocation_mode_hybrid(self):
+        """Test that hybrid tools omit the invocationMode field for old clients."""
+        mcp = FastMCP()
+
+        @mcp.tool(invocation_modes=["sync", "async"])
+        def hybrid_tool(x: int) -> int:
+            """A hybrid tool."""
+            return x * 2
+
+        async with client_session(mcp._mcp_server) as client:
+            tools = await client.list_tools()
+            tool = next(t for t in tools.tools if t.name == "hybrid_tool")
+            # Hybrid tools should not have invocationMode field (None) for old clients
+            assert tool.invocationMode is None
+
+    @pytest.mark.anyio
+    async def test_async_tool_call_basic(self):
+        """Test basic async tool call functionality."""
+        mcp = FastMCP("AsyncTest")
+
+        @mcp.tool(invocation_modes=["async"])
+        async def async_add(a: int, b: int) -> int:
+            """Add two numbers asynchronously."""
+            await anyio.sleep(0.01)  # Simulate async work
+            return a + b
+
+        async with client_session(mcp._mcp_server, protocol_version="next") as client:
+            result = await client.call_tool("async_add", {"a": 5, "b": 3})
+
+            # Should get operation token for async call
+            assert result.operation is not None
+            token = result.operation.token
+
+            # Poll for completion
+            while True:
+                status = await client.get_operation_status(token)
+                
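# Stop on "completed" or "failed"; any other status means the
+                # operation is still in flight, so keep polling.
+                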
if status.status == "completed": + final_result = await client.get_operation_result(token) + assert not final_result.result.isError + assert len(final_result.result.content) == 1 + content = final_result.result.content[0] + assert isinstance(content, TextContent) + assert content.text == "8" + break + elif status.status == "failed": + pytest.fail(f"Operation failed: {status.error}") + await anyio.sleep(0.01) + + @pytest.mark.anyio + async def test_async_tool_call_structured_output(self): + """Test async tool call with structured output.""" + mcp = FastMCP("AsyncTest") + + class AsyncResult(BaseModel): + value: int + processed: bool = True + + @mcp.tool(invocation_modes=["async"]) + async def async_structured_tool(x: int) -> AsyncResult: + """Process data and return structured result.""" + await anyio.sleep(0.01) # Simulate async work + return AsyncResult(value=x * 2) + + async with client_session(mcp._mcp_server, protocol_version="next") as client: + result = await client.call_tool("async_structured_tool", {"x": 21}) + + # Should get operation token for async call + assert result.operation is not None + token = result.operation.token + + # Poll for completion + while True: + status = await client.get_operation_status(token) + if status.status == "completed": + final_result = await client.get_operation_result(token) + assert not final_result.result.isError + assert final_result.result.structuredContent is not None + assert final_result.result.structuredContent == {"value": 42, "processed": True} + break + elif status.status == "failed": + pytest.fail(f"Operation failed: {status.error}") + await anyio.sleep(0.01) + + @pytest.mark.anyio + async def test_async_tool_call_validation_error(self): + """Test async tool call with server-side validation error.""" + mcp = FastMCP("AsyncTest") + + @mcp.tool(invocation_modes=["async"]) + async def async_invalid_tool() -> list[int]: + """Tool that returns invalid structured output.""" + await anyio.sleep(0.01) # Simulate async work + return [1, 2, 3, [4]] # type: ignore + + async with client_session(mcp._mcp_server, protocol_version="next") as client: + result = await client.call_tool("async_invalid_tool", {}) + + # Should get operation token for async call + assert result.operation is not None + token = result.operation.token + + # Poll for completion - should fail due to validation error + while True: + status = await client.get_operation_status(token) + if status.status == "failed": + # Operation should fail due to validation error + assert status.error is not None + break + elif status.status == "completed": + pytest.fail("Operation should have failed due to validation error") + await anyio.sleep(0.01) + + @pytest.mark.anyio + async def test_tool_keep_alive_validation_no_sync_only(self): + """Test that keep_alive validation prevents use on sync-only tools.""" + mcp = FastMCP() + + # Should raise error when keep_alive is used on sync-only tool + with pytest.raises(ValueError, match="keep_alive parameter can only be used with async-compatible tools"): + + @mcp.tool(keep_alive=1800) # Custom keep_alive on sync-only tool + def sync_only_tool(x: int) -> str: + return str(x) + + @pytest.mark.anyio + async def test_tool_keep_alive_default_async_tools(self): + """Test that async tools get correct default keep_alive.""" + mcp = FastMCP() + + # Async tools should get default keep_alive of 3600 + @mcp.tool(invocation_modes=["async"]) # No keep_alive specified + def async_tool_default(x: int) -> str: + return str(x) + + tools = mcp._tool_manager.list_tools() + 
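# keep_alive is stashed in the tool's meta under the private "_keep_alive" key
+        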
tool = next(t for t in tools if t.name == "async_tool_default")
+        assert tool.meta is not None
+        assert tool.meta["_keep_alive"] == 3600
+
+    @pytest.mark.anyio
+    async def test_async_tool_keep_alive_expiry(self):
+        """Test that async operations expire after keep_alive duration."""
+        mcp = FastMCP("AsyncKeepAliveTest")
+
+        @mcp.tool(invocation_modes=["async"], keep_alive=1)  # 1 second keep_alive
+        def short_lived_tool(data: str) -> str:
+            return f"Processed: {data}"
+
+        # Check that the tool has correct keep_alive
+        tools = mcp._tool_manager.list_tools()
+        tool = next(t for t in tools if t.name == "short_lived_tool")
+        assert tool.meta is not None
+        assert tool.meta["_keep_alive"] == 1
+
+        async with client_session(mcp._mcp_server, protocol_version="next") as client:
+            # First list tools to populate keep_alive mapping
+            await client.list_tools()
+
+            # Call the async tool
+            result = await client.call_tool("short_lived_tool", {"data": "test"})
+
+            # Should get operation token
+            assert result.operation is not None
+            token = result.operation.token
+            assert result.operation.keepAlive == 1
+
+            # Wait for operation to complete, yielding between polls
+            while True:
+                status = await client.get_operation_status(token)
+                if status.status == "completed":
+                    break
+                await anyio.sleep(0.01)
+
+            # Get result while still alive
+            operation_result = await client.get_operation_result(token)
+            assert operation_result.result is not None
+
+            # Wait for keep_alive to expire (1 second + buffer)
+            await anyio.sleep(1.2)
+
+            # Operation should now be expired/unavailable
+            with pytest.raises(Exception):  # Should raise error for expired operation
+                await client.get_operation_result(token)
+
+    @pytest.mark.anyio
+    async def test_async_tool_keep_alive_expiry_structured_content(self):
+        """Test that async operations with structured content expire correctly."""
+        mcp = FastMCP("AsyncKeepAliveStructuredTest")
+
+        class ProcessResult(BaseModel):
+            status: str
+            data: str
+            count: int
+
+        @mcp.tool(invocation_modes=["async"], keep_alive=1)  # 1 second keep_alive
+        def structured_tool(input_data: str) -> ProcessResult:
+            return ProcessResult(status="success", data=f"Processed: {input_data}", count=42)
+
+        async with client_session(mcp._mcp_server, protocol_version="next") as client:
+            # First list tools to populate keep_alive mapping
+            await client.list_tools()
+
+            # Call the async tool
+            result = await client.call_tool("structured_tool", {"input_data": "test"})
+
+            # Should get operation token
+            assert result.operation is not None
+            token = result.operation.token
+            assert result.operation.keepAlive == 1
+
+            # Wait for operation to complete, yielding between polls
+            while True:
+                status = await client.get_operation_status(token)
+                if status.status == "completed":
+                    break
+                await anyio.sleep(0.01)
+
+            # Get structured result while still alive
+            operation_result = await client.get_operation_result(token)
+            assert operation_result.result is not None
+            assert operation_result.result.structuredContent is not None
+            structured_data = operation_result.result.structuredContent
+            assert structured_data["status"] == "success"
+            assert structured_data["data"] == "Processed: test"
+            assert structured_data["count"] == 42
+
+            # Wait for keep_alive to expire (1 second + buffer)
+            await anyio.sleep(1.2)
+
+            # Operation should now be expired/unavailable - validation should fail gracefully
+            with pytest.raises(Exception):  # Should raise error for expired operation
+                await client.get_operation_result(token)
+
 
 class TestServerResources:
     @pytest.mark.anyio
diff --git a/tests/server/fastmcp/test_tool_manager.py 
b/tests/server/fastmcp/test_tool_manager.py
index 71884fba2..d047bf4bf 100644
--- a/tests/server/fastmcp/test_tool_manager.py
+++ b/tests/server/fastmcp/test_tool_manager.py
@@ -54,6 +54,7 @@ class AddArguments(ArgModelBase):
         parameters=AddArguments.model_json_schema(),
         context_kwarg=None,
         annotations=None,
+        immediate_result=None,
     )
     manager = ToolManager(tools=[original_tool])
     saved_tool = manager.get_tool("sum")
@@ -178,6 +179,55 @@ def f(x: int) -> int:
         manager.add_tool(f)
         assert "Tool already exists: f" not in caplog.text
 
+    def test_invocation_modes_default(self):
+        """Test that tools default to sync mode when no invocation_modes specified."""
+
+        def sync_tool(x: int) -> int:
+            """A sync tool."""
+            return x * 2
+
+        manager = ToolManager()
+        tool = manager.add_tool(sync_tool)
+
+        assert tool.invocation_modes == ["sync"]
+
+    def test_invocation_modes_async_only(self):
+        """Test async-only tool creation."""
+
+        async def async_tool(x: int) -> int:
+            """An async-only tool."""
+            return x * 2
+
+        manager = ToolManager()
+        tool = manager.add_tool(async_tool, invocation_modes=["async"])
+
+        assert tool.invocation_modes == ["async"]
+        assert tool.is_async is True
+
+    def test_invocation_modes_hybrid(self):
+        """Test hybrid sync/async tool creation."""
+
+        def hybrid_tool(x: int) -> int:
+            """A hybrid tool that supports both modes."""
+            return x * 2
+
+        manager = ToolManager()
+        tool = manager.add_tool(hybrid_tool, invocation_modes=["sync", "async"])
+
+        assert tool.invocation_modes == ["sync", "async"]
+
+    def test_invocation_modes_explicit_sync(self):
+        """Test explicitly setting sync mode."""
+
+        def explicit_sync_tool(x: int) -> int:
+            """An explicitly sync tool."""
+            return x * 2
+
+        manager = ToolManager()
+        tool = manager.add_tool(explicit_sync_tool, invocation_modes=["sync"])
+
+        assert tool.invocation_modes == ["sync"]
+
 
 class TestCallTools:
     @pytest.mark.anyio
@@ -584,6 +634,84 @@ def get_user() -> UserOutput:
         }
         assert tool.output_schema == expected_schema
 
+    def test_tool_meta_property(self):
+        """Test that Tool.meta property works correctly."""
+
+        def double_number(n: int) -> int:
+            """Double a number."""
+            return n * 2
+
+        manager = ToolManager()
+        tool = manager.add_tool(double_number, meta={"foo": "bar"})
+
+        # Test that meta is populated
+        expected_meta = {
+            "foo": "bar",
+        }
+        assert tool.meta == expected_meta
+
+    def test_tool_keep_alive_property_sync(self):
+        """Test that keep_alive is rejected for sync-only tools."""
+
+        def double_number(n: int) -> int:
+            """Double a number."""
+            return n * 2
+
+        manager = ToolManager()
+
+        # Should raise error when keep_alive is used on sync-only tool
+        with pytest.raises(ValueError, match="keep_alive parameter can only be used with async-compatible tools"):
+            manager.add_tool(double_number, invocation_modes=["sync"], keep_alive=1)
+
+    def test_tool_keep_alive_property_async(self):
+        """Test that keep_alive property works correctly with async-only tools."""
+
+        def double_number(n: int) -> int:
+            """Double a number."""
+            return n * 2
+
+        manager = ToolManager()
+        tool = manager.add_tool(double_number, invocation_modes=["async"], keep_alive=1)
+
+        # Test that meta is populated and has the keep_alive stashed in it
+        expected_meta = {
+            "_keep_alive": 1,
+        }
+        assert tool.meta == expected_meta
+
+    def test_tool_keep_alive_property_hybrid(self):
+        """Test that keep_alive property works correctly with hybrid sync/async tools."""
+
+        def double_number(n: int) -> int:
+            """Double a number."""
+            return n * 2
+
+        manager = ToolManager()
+        tool = manager.add_tool(double_number, invocation_modes=["sync", "async"], keep_alive=1)
+
+        # Test that meta is populated and has the keep_alive stashed in it
+        expected_meta = {
+            "_keep_alive": 1,
+        }
+        assert tool.meta == expected_meta
+
+    def test_tool_keep_alive_property_meta(self):
+        """Test that keep_alive property works correctly with existing metadata defined."""
+
+        def double_number(n: int) -> int:
+            """Double a number."""
+            return n * 2
+
+        manager = ToolManager()
+        tool = manager.add_tool(double_number, invocation_modes=["async"], keep_alive=1, meta={"foo": "bar"})
+
+        # Test that meta is populated and has the keep_alive stashed in it
+        expected_meta = {
+            "foo": "bar",
+            "_keep_alive": 1,
+        }
+        assert tool.meta == expected_meta
+
     @pytest.mark.anyio
     async def test_tool_with_dict_str_any_output(self):
         """Test tool with dict[str, Any] return type."""
@@ -746,3 +874,53 @@ def test_func() -> str:
         # Remove with correct case
         manager.remove_tool("test_func")
         assert manager.get_tool("test_func") is None
+
+
+class TestInvocationModes:
+    """Test invocation modes functionality."""
+
+    def test_invocation_mode_type_safety(self):
+        """Test InvocationMode literal type validation."""
+        from mcp.server.fastmcp.tools.base import InvocationMode
+
+        # Valid modes should work
+        valid_modes: list[InvocationMode] = ["sync", "async"]
+        assert valid_modes == ["sync", "async"]
+
+    def test_tool_from_function_with_invocation_modes(self):
+        """Test Tool.from_function with invocation_modes parameter."""
+        from mcp.server.fastmcp.tools.base import Tool
+
+        def test_tool(x: int) -> int:
+            return x
+
+        # Test default behavior
+        tool_default = Tool.from_function(test_tool)
+        assert tool_default.invocation_modes == ["sync"]
+
+        # Test explicit sync
+        tool_sync = Tool.from_function(test_tool, invocation_modes=["sync"])
+        assert tool_sync.invocation_modes == ["sync"]
+
+        # Test async only
+        tool_async = Tool.from_function(test_tool, invocation_modes=["async"])
+        assert tool_async.invocation_modes == ["async"]
+
+        # Test hybrid
+        tool_hybrid = Tool.from_function(test_tool, invocation_modes=["sync", "async"])
+        assert tool_hybrid.invocation_modes == ["sync", "async"]
+
+    def test_tool_manager_invocation_modes_parameter(self):
+        """Test ToolManager.add_tool with invocation_modes parameter."""
+        manager = ToolManager()
+
+        def test_tool(x: int) -> int:
+            return x
+
+        # Test that invocation_modes parameter is passed through
+        tool = manager.add_tool(test_tool, invocation_modes=["async"])
+        assert tool.invocation_modes == ["async"]
+
+        # Test default behavior when None
+        tool_default = manager.add_tool(test_tool, name="test_tool_default")
+        assert tool_default.invocation_modes == ["sync"]
diff --git a/tests/server/test_lowlevel_async_operations.py b/tests/server/test_lowlevel_async_operations.py
new file mode 100644
index 000000000..498ec737a
--- /dev/null
+++ b/tests/server/test_lowlevel_async_operations.py
@@ -0,0 +1,447 @@
+"""Test async operations integration in lowlevel Server."""
+
+import time
+from typing import cast
+
+import pytest
+
+import mcp.types as types
+from mcp.server.lowlevel import Server
+from mcp.shared.async_operations import ServerAsyncOperationManager
+from mcp.shared.exceptions import McpError
+
+
+class TestLowlevelServerAsyncOperations:
+    """Test lowlevel Server async operations integration."""
+
+    @pytest.mark.anyio
+    async def test_check_async_status_invalid_token(self):
+        """Test get_operation_status handler with invalid token."""
+        manager = ServerAsyncOperationManager()
+        server = 
Server("Test", async_operations=manager) + + # Register the handler + @server.get_operation_status() + async def check_status_handler(token: str) -> types.GetOperationStatusResult: + # This function is not actually called due to built-in logic + return types.GetOperationStatusResult(status="unknown") + + # Test invalid token + invalid_request = types.GetOperationStatusRequest(params=types.GetOperationStatusParams(token="invalid_token")) + + handler = server.request_handlers[types.GetOperationStatusRequest] + + with pytest.raises(McpError) as exc_info: + await handler(invalid_request) + + assert exc_info.value.error.code == -32602 + assert exc_info.value.error.message == "Invalid token" + + @pytest.mark.anyio + async def test_check_async_status_expired_token(self): + """Test get_operation_status handler with expired token.""" + manager = ServerAsyncOperationManager() + server = Server("Test", async_operations=manager) + + @server.get_operation_status() + async def check_status_handler(token: str) -> types.GetOperationStatusResult: + return types.GetOperationStatusResult(status="unknown") + + # Create and complete operation with short keepAlive + operation = await manager.create_operation("test_tool", {}, keep_alive=1, session_id="session1") + await manager.complete_operation(operation.token, types.CallToolResult(content=[])) + + # Make it expired + operation.resolved_at = time.time() - 2 + + expired_request = types.GetOperationStatusRequest(params=types.GetOperationStatusParams(token=operation.token)) + + handler = server.request_handlers[types.GetOperationStatusRequest] + + with pytest.raises(McpError) as exc_info: + await handler(expired_request) + + assert exc_info.value.error.code == -32602 + assert exc_info.value.error.message == "Token expired" + + @pytest.mark.anyio + async def test_check_async_status_valid_operation(self): + """Test get_operation_status handler with valid operation.""" + manager = ServerAsyncOperationManager() + server = Server("Test", async_operations=manager) + + @server.get_operation_status() + async def check_status_handler(token: str) -> types.GetOperationStatusResult: + return types.GetOperationStatusResult(status="unknown") + + # Create valid operation + operation = await manager.create_operation("test_tool", {}, session_id="session1") + await manager.mark_working(operation.token) + + valid_request = types.GetOperationStatusRequest(params=types.GetOperationStatusParams(token=operation.token)) + + handler = server.request_handlers[types.GetOperationStatusRequest] + + result = await handler(valid_request) + + assert isinstance(result, types.ServerResult) + status_result = cast(types.GetOperationStatusResult, result.root) + assert status_result.status == "working" + assert status_result.error is None + + @pytest.mark.anyio + async def test_check_async_status_failed_operation(self): + """Test get_operation_status handler with failed operation.""" + manager = ServerAsyncOperationManager() + server = Server("Test", async_operations=manager) + + @server.get_operation_status() + async def check_status_handler(token: str) -> types.GetOperationStatusResult: + return types.GetOperationStatusResult(status="unknown") + + # Create and fail operation + operation = await manager.create_operation("test_tool", {}, session_id="session1") + await manager.fail_operation(operation.token, "Something went wrong") + + failed_request = types.GetOperationStatusRequest(params=types.GetOperationStatusParams(token=operation.token)) + + handler = 
server.request_handlers[types.GetOperationStatusRequest]
+
+        result = await handler(failed_request)
+
+        assert isinstance(result, types.ServerResult)
+        status_result = cast(types.GetOperationStatusResult, result.root)
+        assert status_result.status == "failed"
+        assert status_result.error == "Something went wrong"
+
+    @pytest.mark.anyio
+    async def test_get_async_result_invalid_token(self):
+        """Test get_operation_result handler with invalid token."""
+        manager = ServerAsyncOperationManager()
+        server = Server("Test", async_operations=manager)
+
+        @server.get_operation_result()
+        async def get_result_handler(token: str) -> types.GetOperationPayloadResult:
+            return types.GetOperationPayloadResult(result=types.CallToolResult(content=[]))
+
+        invalid_request = types.GetOperationPayloadRequest(
+            params=types.GetOperationPayloadParams(token="invalid_token")
+        )
+
+        handler = server.request_handlers[types.GetOperationPayloadRequest]
+
+        with pytest.raises(McpError) as exc_info:
+            await handler(invalid_request)
+
+        assert exc_info.value.error.code == -32602
+        assert exc_info.value.error.message == "Invalid token"
+
+    @pytest.mark.anyio
+    async def test_get_async_result_expired_token(self):
+        """Test get_operation_result handler with expired token."""
+        manager = ServerAsyncOperationManager()
+        server = Server("Test", async_operations=manager)
+
+        @server.get_operation_result()
+        async def get_result_handler(token: str) -> types.GetOperationPayloadResult:
+            return types.GetOperationPayloadResult(result=types.CallToolResult(content=[]))
+
+        # Create and complete operation with short keepAlive
+        operation = await manager.create_operation("test_tool", {}, keep_alive=1, session_id="session1")
+        await manager.complete_operation(operation.token, types.CallToolResult(content=[]))
+
+        # Make it expired
+        operation.resolved_at = time.time() - 2
+
+        expired_request = types.GetOperationPayloadRequest(
+            params=types.GetOperationPayloadParams(token=operation.token)
+        )
+
+        handler = server.request_handlers[types.GetOperationPayloadRequest]
+
+        with pytest.raises(McpError) as exc_info:
+            await handler(expired_request)
+
+        assert exc_info.value.error.code == -32602
+        assert exc_info.value.error.message == "Token expired"
+
+    @pytest.mark.anyio
+    async def test_get_async_result_not_completed(self):
+        """Test get_operation_result handler with non-completed operation."""
+        manager = ServerAsyncOperationManager()
+        server = Server("Test", async_operations=manager)
+
+        @server.get_operation_result()
+        async def get_result_handler(token: str) -> types.GetOperationPayloadResult:
+            return types.GetOperationPayloadResult(result=types.CallToolResult(content=[]))
+
+        # Create operation that's still working
+        operation = await manager.create_operation("test_tool", {}, session_id="session1")
+        await manager.mark_working(operation.token)
+
+        working_request = types.GetOperationPayloadRequest(
+            params=types.GetOperationPayloadParams(token=operation.token)
+        )
+
+        handler = server.request_handlers[types.GetOperationPayloadRequest]
+
+        with pytest.raises(McpError) as exc_info:
+            await handler(working_request)
+
+        assert exc_info.value.error.code == -32600
+        assert exc_info.value.error.message == "Operation not completed (status: working)"
+
+    @pytest.mark.anyio
+    async def test_get_async_result_completed_with_result(self):
+        """Test get_operation_result handler with completed operation."""
+        manager = ServerAsyncOperationManager()
+        server = Server("Test", async_operations=manager)
+
+        @server.get_operation_result()
+        async def get_result_handler(token: str) -> types.GetOperationPayloadResult:
+            return types.GetOperationPayloadResult(result=types.CallToolResult(content=[]))
+
+        # Create and complete operation with result
+        operation = await manager.create_operation("test_tool", {}, session_id="session1")
+        result = types.CallToolResult(content=[types.TextContent(type="text", text="success")])
+        await manager.complete_operation(operation.token, result)
+
+        completed_request = types.GetOperationPayloadRequest(
+            params=types.GetOperationPayloadParams(token=operation.token)
+        )
+
+        handler = server.request_handlers[types.GetOperationPayloadRequest]
+
+        response = await handler(completed_request)
+
+        assert isinstance(response, types.ServerResult)
+        payload_result = cast(types.GetOperationPayloadResult, response.root)
+        assert payload_result.result == result
+
+
+class TestCancellationLogic:
+    """Test cancellation logic for async operations."""
+
+    @pytest.mark.anyio
+    async def test_handle_cancelled_notification(self):
+        """Test handling of cancelled notifications."""
+        manager = ServerAsyncOperationManager()
+        server = Server("Test", async_operations=manager)
+
+        # Create an operation
+        operation = await manager.create_operation("test_tool", {"arg": "value"}, session_id="session1")
+
+        # Track the operation with a request ID
+        request_id = "req_123"
+        server._request_to_operation[request_id] = operation.token
+
+        # Handle cancellation
+        await server.handle_cancelled_notification(request_id)
+
+        # Verify operation was cancelled
+        cancelled_op = await manager.get_operation(operation.token)
+        assert cancelled_op is not None
+        assert cancelled_op.status == "canceled"
+
+        # Verify mapping was cleaned up
+        assert request_id not in server._request_to_operation
+
+    @pytest.mark.anyio
+    async def test_cancelled_notification_handler(self):
+        """Test the async cancelled notification handler."""
+        manager = ServerAsyncOperationManager()
+        server = Server("Test", async_operations=manager)
+
+        # Create an operation
+        operation = await manager.create_operation("test_tool", {"arg": "value"}, session_id="session1")
+
+        # Track the operation with a request ID
+        request_id = "req_456"
+        server._request_to_operation[request_id] = operation.token
+
+        # Create cancelled notification
+        notification = types.CancelledNotification(params=types.CancelledNotificationParams(requestId=request_id))
+
+        await server._handle_cancelled_notification(notification)
+
+        # Verify operation was cancelled
+        cancelled_op = await manager.get_operation(operation.token)
+        assert cancelled_op is not None
+        assert cancelled_op.status == "canceled"
+
+    @pytest.mark.anyio
+    async def test_validate_operation_token_cancelled(self):
+        """Test that cancelled operations are rejected."""
+        manager = ServerAsyncOperationManager()
+        server = Server("Test", async_operations=manager)
+
+        # Create and cancel an operation
+        operation = await manager.create_operation("test_tool", {"arg": "value"}, session_id="session1")
+        await manager.cancel_operation(operation.token)
+
+        # Verify that accessing cancelled operation raises error
+        with pytest.raises(McpError) as exc_info:
+            await server._validate_operation_token(operation.token)
+
+        assert exc_info.value.error.code == -32602
+        assert "cancelled" in exc_info.value.error.message.lower()
+
+    @pytest.mark.anyio
+    async def test_nonexistent_request_id_cancellation(self):
+        """Test cancellation of non-existent request ID."""
+        server = Server("Test")
+
+        # Should not raise error for non-existent request ID
+        await server.handle_cancelled_notification("nonexistent_request")
+
+        # Verify no operations were affected
+        assert len(server._request_to_operation) == 0
+
+
+class TestInputRequiredBehavior:
+    """Test input_required status handling for async operations."""
+
+    @pytest.mark.anyio
+    async def test_mark_input_required(self):
+        """Test marking operation as requiring input."""
+        manager = ServerAsyncOperationManager()
+
+        # Create operation in submitted state
+        operation = await manager.create_operation("test_tool", {"arg": "value"}, session_id="session1")
+        assert operation.status == "submitted"
+
+        # Mark as input required
+        result = await manager.mark_input_required(operation.token)
+        assert result is True
+
+        # Verify status changed
+        updated_op = await manager.get_operation(operation.token)
+        assert updated_op is not None
+        assert updated_op.status == "input_required"
+
+    @pytest.mark.anyio
+    async def test_mark_input_required_from_working(self):
+        """Test marking working operation as requiring input."""
+        manager = ServerAsyncOperationManager()
+
+        # Create and mark as working
+        operation = await manager.create_operation("test_tool", {"arg": "value"}, session_id="session1")
+        await manager.mark_working(operation.token)
+        assert operation.status == "working"
+
+        # Mark as input required
+        result = await manager.mark_input_required(operation.token)
+        assert result is True
+        assert operation.status == "input_required"
+
+    @pytest.mark.anyio
+    async def test_mark_input_required_invalid_states(self):
+        """Test that input_required can only be set from valid states."""
+        manager = ServerAsyncOperationManager()
+
+        # Test from completed state
+        operation = await manager.create_operation("test_tool", {"arg": "value"}, session_id="session1")
+        await manager.complete_operation(operation.token, types.CallToolResult(content=[]))
+
+        result = await manager.mark_input_required(operation.token)
+        assert result is False
+        assert operation.status == "completed"
+
+    @pytest.mark.anyio
+    async def test_mark_input_completed(self):
+        """Test marking input as completed."""
+        manager = ServerAsyncOperationManager()
+
+        # Create operation and mark as input required
+        operation = await manager.create_operation("test_tool", {"arg": "value"}, session_id="session1")
+        await manager.mark_input_required(operation.token)
+        assert operation.status == "input_required"
+
+        # Mark input as completed
+        result = await manager.mark_input_completed(operation.token)
+        assert result is True
+        assert operation.status == "working"
+
+    @pytest.mark.anyio
+    async def test_mark_input_completed_invalid_state(self):
+        """Test that input can only be completed from input_required state."""
+        manager = ServerAsyncOperationManager()
+
+        # Create operation in submitted state
+        operation = await manager.create_operation("test_tool", {"arg": "value"}, session_id="session1")
+        assert operation.status == "submitted"
+
+        # Try to mark input completed from wrong state
+        result = await manager.mark_input_completed(operation.token)
+        assert result is False
+        assert operation.status == "submitted"
+
+    @pytest.mark.anyio
+    async def test_nonexistent_token_operations(self):
+        """Test input_required operations on nonexistent tokens."""
+        manager = ServerAsyncOperationManager()
+
+        # Test with fake token
+        assert await manager.mark_input_required("fake_token") is False
+        assert await manager.mark_input_completed("fake_token") is False
+
+    @pytest.mark.anyio
+    async def test_server_send_request_for_operation(self):
+        """Test server method for sending requests with operation tokens."""
+        manager = ServerAsyncOperationManager()
+        server = Server("Test", async_operations=manager)
+
+        # Create operation
+        operation = await manager.create_operation("test_tool", {"arg": "value"}, session_id="session1")
+        await manager.mark_working(operation.token)
+
+        # Create a mock request
+        request = types.ServerRequest(
+            types.CreateMessageRequest(
+                params=types.CreateMessageRequestParams(
+                    messages=[types.SamplingMessage(role="user", content=types.TextContent(type="text", text="test"))],
+                    maxTokens=100,
+                )
+            )
+        )
+
+        # Send request for operation
+        await server.send_request_for_operation(operation.token, request)
+
+        # Verify operation status changed
+        updated_op = await manager.get_operation(operation.token)
+        assert updated_op is not None
+        assert updated_op.status == "input_required"
+
+    @pytest.mark.anyio
+    async def test_server_complete_request_for_operation(self):
+        """Test server method for completing requests."""
+        manager = ServerAsyncOperationManager()
+        server = Server("Test", async_operations=manager)
+
+        # Create operation and mark as input required
+        operation = await manager.create_operation("test_tool", {"arg": "value"}, session_id="session1")
+        await manager.mark_input_required(operation.token)
+
+        # Complete request for operation
+        await server.complete_request_for_operation(operation.token)
+
+        # Verify operation status changed back to working
+        updated_op = await manager.get_operation(operation.token)
+        assert updated_op is not None
+        assert updated_op.status == "working"
+
+    @pytest.mark.anyio
+    async def test_input_required_is_terminal_check(self):
+        """Test that input_required is not considered a terminal state."""
+        manager = ServerAsyncOperationManager()
+
+        # Create operation and mark as input required
+        operation = await manager.create_operation("test_tool", {"arg": "value"}, session_id="session1")
+        await manager.mark_input_required(operation.token)
+
+        # Verify it's not terminal
+        assert not operation.is_terminal
+
+        # Verify it doesn't expire while in input_required state
+        assert not operation.is_expired
diff --git a/tests/shared/test_async_operations.py b/tests/shared/test_async_operations.py
new file mode 100644
index 000000000..4a0707036
--- /dev/null
+++ b/tests/shared/test_async_operations.py
@@ -0,0 +1,231 @@
+"""Tests for AsyncOperationManager."""
+
+import secrets
+import time
+from typing import Any, cast
+from unittest.mock import Mock
+
+import pytest
+
+import mcp.types as types
+from mcp.shared.async_operations import ServerAsyncOperation, ServerAsyncOperationManager
+from mcp.types import AsyncOperationStatus
+
+
+class TestAsyncOperationManager:
+    """Test AsyncOperationManager functionality."""
+
+    async def _create_manager_with_operation(
+        self, session_id: str = "session1", **kwargs: Any
+    ) -> tuple[ServerAsyncOperationManager, ServerAsyncOperation]:
+        """Helper to create manager with a test operation."""
+        manager = ServerAsyncOperationManager()
+        operation = await manager.create_operation("test_tool", {"arg": "value"}, session_id=session_id, **kwargs)
+        return manager, operation
+
+    def test_token_generation(self):
+        """Test token generation with default and custom generators."""
+        # Default token generation
+        manager = ServerAsyncOperationManager()
+        token1 = manager.generate_token("test_session")
+        token2 = manager.generate_token("test_session")
+        assert token1 != token2 and len(token1) > 20 and not token1.startswith("test_session_")
+
+        # Custom token generator
+        custom_manager = ServerAsyncOperationManager(token_generator=lambda sid: f"custom_{sid}_token")
+        assert custom_manager.generate_token("test") == "custom_test_token"
+
+        # Session-scoped token generator
+        scoped_manager = ServerAsyncOperationManager(token_generator=lambda sid: f"{sid}_{secrets.token_urlsafe(16)}")
+        token1, token2 = scoped_manager.generate_token("s1"), scoped_manager.generate_token("s2")
+        assert token1.startswith("s1_") and token2.startswith("s2_") and token1 != token2
+
+    @pytest.mark.anyio
+    async def test_operation_lifecycle(self):
+        """Test complete operation lifecycle including direct transitions."""
+        manager, operation = await self._create_manager_with_operation()
+        token = operation.token
+
+        # Test creation
+        assert operation.status == "submitted" and operation.result is None
+
+        # Test working transition
+        assert await manager.mark_working(token)
+        working_op = await manager.get_operation(token)
+        assert working_op is not None and working_op.status == "working"
+
+        # Test completion
+        result = types.CallToolResult(content=[types.TextContent(type="text", text="success")])
+        assert await manager.complete_operation(token, result)
+        completed_op = await manager.get_operation(token)
+        assert completed_op is not None
+        assert completed_op.status == "completed" and completed_op.result == result
+        assert await manager.get_operation_result(token) == result
+
+        # Test direct completion from submitted (new manager to avoid interference)
+        direct_manager, direct_op = await self._create_manager_with_operation()
+        assert await direct_manager.complete_operation(direct_op.token, result)
+        direct_completed = await direct_manager.get_operation(direct_op.token)
+        assert direct_completed is not None and direct_completed.status == "completed"
+
+        # Test direct failure from submitted (new manager to avoid interference)
+        fail_manager, fail_op = await self._create_manager_with_operation()
+        assert await fail_manager.fail_operation(fail_op.token, "immediate error")
+        failed = await fail_manager.get_operation(fail_op.token)
+        assert failed is not None
+        assert failed.status == "failed" and failed.error == "immediate error"
+
+    @pytest.mark.anyio
+    async def test_operation_failure_and_cancellation(self):
+        """Test operation failure and cancellation."""
+        manager, operation = await self._create_manager_with_operation()
+
+        # Test failure
+        await manager.mark_working(operation.token)
+        assert await manager.fail_operation(operation.token, "Something went wrong")
+        failed_op = await manager.get_operation(operation.token)
+        assert failed_op is not None
+        assert failed_op.status == "failed" and failed_op.error == "Something went wrong"
+        assert await manager.get_operation_result(operation.token) is None
+
+        # Test cancellation (new manager to avoid interference)
+        cancel_manager, cancel_op = await self._create_manager_with_operation()
+        assert await cancel_manager.cancel_operation(cancel_op.token)
+        canceled_op = await cancel_manager.get_operation(cancel_op.token)
+        assert canceled_op is not None and canceled_op.status == "canceled"
+
+    @pytest.mark.anyio
+    async def test_state_transitions_and_terminal_states(self):
+        """Test state transition validation and terminal state immutability."""
+        manager, operation = await self._create_manager_with_operation()
+        token = operation.token
+        result = Mock()
+
+        # Valid transitions
+        assert await manager.mark_working(token)
+        assert await manager.complete_operation(token, result)
+
+        # Invalid transitions from terminal state
+        assert not await manager.mark_working(token)
+        assert not await manager.fail_operation(token, "error")
+        assert not await manager.cancel_operation(token)
+        completed_check = await manager.get_operation(token)
+        assert completed_check is not None and completed_check.status == "completed"
+
+        # Test other terminal states (use separate managers since previous operation is already completed)
+        async def fail_action(m: ServerAsyncOperationManager, t: str) -> bool:
+            return await m.fail_operation(t, "err")
+
+        async def cancel_action(m: ServerAsyncOperationManager, t: str) -> bool:
+            return await m.cancel_operation(t)
+
+        for status, action in [
+            ("failed", fail_action),
+            ("canceled", cancel_action),
+        ]:
+            test_manager, test_op = await self._create_manager_with_operation()
+            await action(test_manager, test_op.token)
+            terminal_op = await test_manager.get_operation(test_op.token)
+            assert terminal_op is not None
+            assert terminal_op.status == status and terminal_op.is_terminal
+
+    @pytest.mark.anyio
+    async def test_nonexistent_token_operations(self):
+        """Test operations on nonexistent tokens."""
+        manager = ServerAsyncOperationManager()
+        fake_token = "fake_token"
+
+        for method, args in [
+            ("get_operation", ()),
+            ("mark_working", ()),
+            ("complete_operation", (Mock(),)),
+            ("fail_operation", ("error",)),
+            ("cancel_operation", ()),
+            ("get_operation_result", ()),
+        ]:
+            result = await getattr(manager, method)(fake_token, *args)
+            assert result in (None, False)
+
+    @pytest.mark.anyio
+    async def test_expiration_and_cleanup(self):
+        """Test operation expiration and cleanup."""
+        manager = ServerAsyncOperationManager()
+
+        # Create operations with different expiration times
+        short_op = await manager.create_operation("tool1", {}, keep_alive=1, session_id="session1")
+        long_op = await manager.create_operation("tool2", {}, keep_alive=10, session_id="session1")
+
+        # Complete both and make first expired
+        for op in [short_op, long_op]:
+            await manager.complete_operation(op.token, Mock())
+        short_op.resolved_at = time.time() - 2
+
+        # Test expiration detection
+        assert short_op.is_expired and not long_op.is_expired
+
+        # Test cleanup
+        removed_count = await manager.cleanup_expired()
+        assert removed_count == 1
+        assert await manager.get_operation(short_op.token) is None
+        assert await manager.get_operation(long_op.token) is not None
+
+    @pytest.mark.anyio
+    async def test_concurrent_operations(self):
+        """Test concurrent operation handling and memory management."""
+        manager = ServerAsyncOperationManager()
+
+        # Create many operations
+        operations = [
+            await manager.create_operation(f"tool_{i}", {"data": "x" * 100}, session_id=f"session_{i % 3}")
+            for i in range(50)
+        ]
+
+        # All should be created successfully with unique tokens
+        assert len(operations) == 50
+        tokens = [op.token for op in operations]
+        assert len(set(tokens)) == 50
+
+        # Complete half with short keepAlive and make them expired
+        for i in range(25):
+            await manager.complete_operation(operations[i].token, Mock())
+            operations[i].keep_alive = 1
+            operations[i].resolved_at = time.time() - 2
+
+        # Cleanup should remove expired operations
+        removed_count = await manager.cleanup_expired()
+        assert removed_count == 25
+
+
+class TestAsyncOperation:
+    """Test AsyncOperation dataclass."""
+
+    def test_terminal_and_expiration_logic(self):
+        """Test terminal state detection and expiration logic."""
+        now = time.time()
+        operation = ServerAsyncOperation("test", "test", {}, "submitted", now, 3600)
+
+        # Test terminal state detection
+        for status_str, is_terminal in [
+            ("submitted", False),
+            ("working", False),
+            ("completed", True),
+            ("failed", True),
+            ("canceled", True),
+            ("unknown", True),
+        ]:
+            status: AsyncOperationStatus = cast(AsyncOperationStatus, status_str)
+            operation.status = status
+            assert operation.is_terminal == is_terminal
+
+        # Test expiration logic
+        working_status: AsyncOperationStatus = "working"
+        operation.status = working_status
+        assert not operation.is_expired  # Non-terminal never expires
+
+        completed_status: AsyncOperationStatus = "completed"
+        operation.status = completed_status
+        operation.resolved_at = now - 1800  # 30 minutes ago
+        assert not operation.is_expired  # Within keepAlive
+
+        operation.resolved_at = now - 7200  # 2 hours ago
+        assert operation.is_expired  # Past keepAlive
diff --git a/tests/shared/test_progress_notifications.py b/tests/shared/test_progress_notifications.py
index 600972272..abb9d49c1 100644
--- a/tests/shared/test_progress_notifications.py
+++ b/tests/shared/test_progress_notifications.py
@@ -277,7 +277,9 @@ async def handle_client_message(
             meta = types.RequestParams.Meta(progressToken=progress_token)
             request_context = RequestContext(
                 request_id="test-request",
+                operation_token=None,
                 session=client_session,
+                supports_async=False,
                 meta=meta,
                 lifespan_context=None,
             )
diff --git a/uv.lock b/uv.lock
index 68abdcc4f..91c417e49 100644
--- a/uv.lock
+++ b/uv.lock
@@ -12,7 +12,9 @@ members = [
     "mcp-simple-streamablehttp",
     "mcp-simple-streamablehttp-stateless",
     "mcp-simple-tool",
+    "mcp-simple-tool-async",
    "mcp-snippets",
+    "mcp-sqlite-async-operations",
 ]
 
 [[package]]
@@ -937,6 +939,39 @@ dev = [
     { name = "ruff", specifier = ">=0.6.9" },
 ]
+[[package]]
+name = "mcp-simple-tool-async"
+version = "0.1.0"
+source = { editable = "examples/servers/simple-tool-async" }
+dependencies = [
+    { name = "anyio" },
+    { name = "click" },
+    { name = "httpx" },
+    { name = "mcp" },
+]
+
+[package.dev-dependencies]
+dev = [
+    { name = "pyright" },
+    { name = "pytest" },
+    { name = "ruff" },
+]
+
+[package.metadata]
+requires-dist = [
+    { name = "anyio", specifier = ">=4.5" },
+    { name = "click", specifier = ">=8.2.0" },
+    { name = "httpx", specifier = ">=0.27" },
+    { name = "mcp", editable = "." },
+]
+
+[package.metadata.requires-dev]
+dev = [
+    { name = "pyright", specifier = ">=1.1.378" },
+    { name = "pytest", specifier = ">=8.3.3" },
+    { name = "ruff", specifier = ">=0.6.9" },
+]
+
 [[package]]
 name = "mcp-snippets"
 version = "0.1.0"
 source = { editable = "examples/snippets" }
 dependencies = [
     { name = "mcp" },
 ]
 
 [package.metadata]
 requires-dist = [{ name = "mcp", editable = "." }]
 
@@ -948,6 +983,39 @@
+[[package]]
+name = "mcp-sqlite-async-operations"
+version = "0.1.0"
+source = { editable = "examples/servers/sqlite-async-operations" }
+dependencies = [
+    { name = "anyio" },
+    { name = "click" },
+    { name = "httpx" },
+    { name = "mcp" },
+]
+
+[package.dev-dependencies]
+dev = [
+    { name = "pyright" },
+    { name = "pytest" },
+    { name = "ruff" },
+]
+
+[package.metadata]
+requires-dist = [
+    { name = "anyio", specifier = ">=4.5" },
+    { name = "click", specifier = ">=8.2.0" },
+    { name = "httpx", specifier = ">=0.27" },
+    { name = "mcp", editable = "." },
+]
+
+[package.metadata.requires-dev]
+dev = [
+    { name = "pyright", specifier = ">=1.1.378" },
+    { name = "pytest", specifier = ">=8.3.3" },
+    { name = "ruff", specifier = ">=0.6.9" },
+]
+
 [[package]]
 name = "mdurl"
 version = "0.1.2"