|
| 1 | +import asyncio |
| 2 | +import json |
| 3 | +import logging |
| 4 | +from contextlib import asynccontextmanager |
| 5 | +from typing import Any |
| 6 | +from urllib.parse import quote |
| 7 | +from uuid import UUID |
| 8 | + |
| 9 | +import anyio |
| 10 | +from mcp import types |
| 11 | +from pydantic import ValidationError |
| 12 | +from sse_starlette import EventSourceResponse |
| 13 | +from starlette.background import BackgroundTask |
| 14 | +from starlette.requests import Request |
| 15 | +from starlette.responses import Response |
| 16 | +from starlette.types import Receive, Scope, Send |
| 17 | + |
| 18 | +from .session import Session, SessionManager |
| 19 | + |
| 20 | +logger = logging.getLogger(__name__) |
| 21 | + |
def patch_meta_data(body: bytes, **kwargs) -> bytes:
    """Inject keyword arguments into a JSON-RPC message's ``params._meta``.

    The raw request *body* is parsed as UTF-8 JSON; a ``params`` object is
    created when absent, and each keyword argument is written into
    ``params["_meta"]`` (created on first use, existing keys are overwritten).
    With no keyword arguments the message is returned unchanged apart from
    an empty ``params`` object being ensured.

    Note: assumes the body is a single JSON-RPC object with dict-style
    params — a batch (list) body or positional (list) params would fail,
    as in the original behavior.
    """
    message = json.loads(body.decode("utf-8"))
    params = message.setdefault("params", {})

    if kwargs:
        # _meta is only materialized when there is something to store.
        params.setdefault("_meta", {}).update(kwargs)
    return json.dumps(message).encode("utf-8")
| 30 | + |
class SseTransport:
    """Server-Sent-Events transport for an MCP server.

    Bridges a long-lived SSE GET connection (``connect_sse``) and the
    client's POST messages (``handle_post_message``) onto a ``Session``'s
    anyio memory streams, which are obtained from the injected
    ``SessionManager``.
    """

    def __init__(self, endpoint: str, session_manager: SessionManager) -> None:
        # `endpoint` is the POST URL advertised to the client via the
        # initial "endpoint" SSE event; sessions are looked up by the
        # `session_id` query parameter appended to it.
        self.session_manager = session_manager
        self._endpoint = endpoint

    @asynccontextmanager
    async def connect_sse(self, scope: Scope, receive: Receive, send: Send):
        """Serve the SSE stream for one session and yield its MCP streams.

        Expects ``scope["session_id"]`` to hold the session UUID as a hex
        string (presumably placed there by upstream routing — not visible
        here). Yields ``(read_stream, write_stream)`` for the MCP server to
        run against; outbound messages placed on the session's write stream
        are forwarded to the client as ``message`` SSE events.
        """
        session_id_hex = scope["session_id"]
        session_id: UUID = UUID(hex=session_id_hex)
        session = await self.session_manager.get_session(session_id)

        # URL the client must POST subsequent JSON-RPC messages to.
        session_uri = f"{quote(self._endpoint)}?session_id={session_id.hex}"

        # Zero-capacity stream: each SSE event is handed directly to the
        # response writer without buffering.
        sse_stream_writer, sse_stream_reader = anyio.create_memory_object_stream[dict[str, Any]](0)

        async def sse_writer():
            # Pump: first advertise the POST endpoint, then relay every
            # outbound MCP message from the session's write stream as an
            # SSE "message" event. Closing either stream ends the pump.
            logger.debug("Starting SSE writer")
            async with sse_stream_writer, session["write_stream_reader"]:
                await sse_stream_writer.send({"event": "endpoint", "data": session_uri})
                logger.debug(f"Sent endpoint event: {session_uri}")

                async for message in session["write_stream_reader"]:
                    logger.debug(f"Sending message via SSE: {message}")
                    await sse_stream_writer.send(
                        {
                            "event": "message",
                            # JSON-RPC wire format: aliases on, nulls omitted.
                            "data": message.model_dump_json(by_alias=True, exclude_none=True),
                        }
                    )

        async with anyio.create_task_group() as tg:
            async def on_client_disconnect():
                # Runs as the response's background task once the client
                # drops the SSE connection; tears down the session.
                await self.session_manager.close_session(session_id)

            try:
                response = EventSourceResponse(
                    content=sse_stream_reader,
                    data_sender_callable=sse_writer,
                    background=BackgroundTask(on_client_disconnect),
                )
                logger.debug("Starting SSE response task")
                # The ASGI response runs concurrently with the MCP server
                # that consumes the yielded streams below.
                tg.start_soon(response, scope, receive, send)

                logger.debug("Yielding read and write streams")
                # Due to limitations with interrupting the MCP server run operation,
                # this will always block here regardless of client disconnection status
                yield (session["read_stream"], session["write_stream"])
            except asyncio.CancelledError as exc:
                logger.warning(f"SSE connection for session {session_id} was cancelled")
                tg.cancel_scope.cancel()
                # raise the exception again so that to interrupt mcp server run operation
                raise exc
            finally:
                # for server shutdown
                await self.session_manager.cleanup_resources()

    async def handle_post_message(self, scope: Scope, receive: Receive, send: Send):
        """Handle a client POST carrying one JSON-RPC message.

        Validates the body, stamps the session's ``meta`` into
        ``params._meta`` via ``patch_meta_data``, and forwards the message
        into the session's read stream. Responds 202 on success, 400 on a
        body that fails JSON-RPC validation.
        """
        # NOTE(review): here the raw hex string is used as the session key,
        # while connect_sse converts it to a UUID before calling
        # close_session — confirm which type SessionManager expects.
        session_id = scope["session_id"]
        session: Session = await self.session_manager.get_session(UUID(hex=session_id))

        request = Request(scope, receive)
        body = await request.body()
        # patch meta data: merge the session's stored meta into the
        # message's params._meta before validation.
        body = patch_meta_data(body, **session["meta"])

        # send message to writer
        writer = session["read_stream_writer"]
        try:
            message = types.JSONRPCMessage.model_validate_json(body)
            logger.debug(f"Validated client message: {message}")
        except ValidationError as err:
            logger.error(f"Failed to parse message: {err}")
            response = Response("Could not parse message", status_code=400)
            await response(scope, receive, send)
            try:
                # Forward the ValidationError itself into the read stream so
                # the consuming server is informed of the bad message.
                await writer.send(err)
            except (BrokenPipeError, ConnectionError, OSError) as pipe_err:
                logger.warning(f"Failed to send error due to pipe issue: {pipe_err}")
            return

        logger.debug(f"Sending message to writer: {message}")
        # Respond 202 before forwarding: the POST only acknowledges receipt;
        # the actual reply travels back over the SSE stream.
        response = Response("Accepted", status_code=202)
        await response(scope, receive, send)

        # add error handling, catch possible pipe errors
        # (BrokenPipeError and ConnectionError are OSError subclasses, so
        # the tuple is redundant but harmless.)
        try:
            await writer.send(message)
        except (BrokenPipeError, ConnectionError, OSError) as e:
            # if it's EPIPE error or other connection error, log it but don't throw an exception
            if isinstance(e, OSError) and e.errno == 32:  # EPIPE
                logger.warning(f"EPIPE error when sending message to session {session_id}, connection may be closing")
            else:
                logger.warning(f"Connection error when sending message to session {session_id}: {e}")
            await self.session_manager.close_session(session_id)
0 commit comments