|
| 1 | +"""EventStore implementation backed by AsyncKeyValue. |
| 2 | +
|
| 3 | +This module provides an EventStore implementation that enables SSE polling/resumability |
| 4 | +for Streamable HTTP transports. Events are stored using the key_value package's |
| 5 | +AsyncKeyValue protocol, allowing users to configure any compatible backend |
| 6 | +(in-memory, Redis, etc.) following the same pattern as ResponseCachingMiddleware. |
| 7 | +""" |
| 8 | + |
| 9 | +from __future__ import annotations |
| 10 | + |
| 11 | +from uuid import uuid4 |
| 12 | + |
| 13 | +from key_value.aio.adapters.pydantic import PydanticAdapter |
| 14 | +from key_value.aio.protocols import AsyncKeyValue |
| 15 | +from key_value.aio.stores.memory import MemoryStore |
| 16 | +from mcp.server.streamable_http import EventCallback, EventId, EventMessage, StreamId |
| 17 | +from mcp.server.streamable_http import EventStore as SDKEventStore |
| 18 | +from mcp.types import JSONRPCMessage |
| 19 | +from pydantic import BaseModel |
| 20 | + |
| 21 | +from fastmcp.utilities.logging import get_logger |
| 22 | + |
| 23 | +logger = get_logger(__name__) |
| 24 | + |
| 25 | + |
class EventEntry(BaseModel):
    """Stored event entry.

    One record per SSE event, persisted in the "fastmcp_events" collection
    under its ``event_id`` key so it can be looked up during replay.
    """

    # Unique identifier generated for this event (uuid4 hex string).
    event_id: str
    # Identifier of the stream this event belongs to; used to locate the
    # stream's event-ID index on replay.
    stream_id: str
    message: dict | None  # JSONRPCMessage serialized to dict; None for priming events
| 32 | + |
| 33 | + |
class StreamEventList(BaseModel):
    """List of event IDs for a stream.

    Persisted per ``stream_id`` in the "fastmcp_streams" collection and used
    as an ordered index into the stored :class:`EventEntry` records.
    """

    # Event IDs in insertion (oldest-to-newest) order, trimmed to the
    # store's max_events_per_stream limit.
    event_ids: list[str]
| 39 | + |
class EventStore(SDKEventStore):
    """EventStore implementation backed by AsyncKeyValue.

    Enables SSE polling/resumability by storing events that can be replayed
    when clients reconnect. Works with any AsyncKeyValue backend (memory, Redis, etc.)
    following the same pattern as ResponseCachingMiddleware and OAuthProxy.

    Example:
        ```python
        from fastmcp import FastMCP
        from fastmcp.server.event_store import EventStore

        # Default in-memory storage
        event_store = EventStore()

        # Or with a custom backend
        from key_value.aio.stores.redis import RedisStore
        redis_backend = RedisStore(url="redis://localhost")
        event_store = EventStore(storage=redis_backend)

        mcp = FastMCP("MyServer")
        app = mcp.http_app(event_store=event_store, retry_interval=2000)
        ```

    Args:
        storage: AsyncKeyValue backend. Defaults to MemoryStore.
        max_events_per_stream: Maximum events to retain per stream. Default 100.
            Must be at least 1.
        ttl: Event TTL in seconds. Default 3600 (1 hour). Set to None for no expiration.

    Raises:
        ValueError: If max_events_per_stream is less than 1.
    """

    def __init__(
        self,
        storage: AsyncKeyValue | None = None,
        max_events_per_stream: int = 100,
        ttl: int | None = 3600,
    ):
        if max_events_per_stream < 1:
            # Guard against a silent no-op: with a limit of 0 the trimming
            # slices below become event_ids[:0] (deletes nothing) and
            # event_ids[-0:] (keeps everything), so every event would be
            # retained forever instead of none.
            raise ValueError("max_events_per_stream must be at least 1")

        self._storage: AsyncKeyValue = storage or MemoryStore()
        self._max_events_per_stream = max_events_per_stream
        self._ttl = ttl

        # PydanticAdapter for type-safe storage (following OAuth proxy pattern).
        # Events and per-stream ID indexes live in separate collections so an
        # event_id key can never collide with a stream_id key.
        self._event_store: PydanticAdapter[EventEntry] = PydanticAdapter[EventEntry](
            key_value=self._storage,
            pydantic_model=EventEntry,
            default_collection="fastmcp_events",
        )
        self._stream_store: PydanticAdapter[StreamEventList] = PydanticAdapter[
            StreamEventList
        ](
            key_value=self._storage,
            pydantic_model=StreamEventList,
            default_collection="fastmcp_streams",
        )

    async def store_event(
        self, stream_id: StreamId, message: JSONRPCMessage | None
    ) -> EventId:
        """Store an event and return its ID.

        Args:
            stream_id: ID of the stream the event belongs to
            message: The JSON-RPC message to store, or None for priming events

        Returns:
            The generated event ID for the stored event
        """
        event_id = str(uuid4())

        # Store the event entry. Explicit `is not None` (rather than
        # truthiness) so only a genuinely absent message is stored as None.
        entry = EventEntry(
            event_id=event_id,
            stream_id=stream_id,
            message=message.model_dump(mode="json") if message is not None else None,
        )
        await self._event_store.put(key=event_id, value=entry, ttl=self._ttl)

        # Append to the stream's event-ID index. NOTE(review): this is a
        # read-modify-write with no locking — assumes a single writer per
        # stream, which holds for the transport's per-stream event flow.
        stream_data = await self._stream_store.get(key=stream_id)
        event_ids = stream_data.event_ids if stream_data else []
        event_ids.append(event_id)

        # Trim to the retention limit, deleting evicted entries eagerly so
        # they don't linger in the backend until TTL expiry.
        if len(event_ids) > self._max_events_per_stream:
            for old_id in event_ids[: -self._max_events_per_stream]:
                await self._event_store.delete(key=old_id)
            event_ids = event_ids[-self._max_events_per_stream :]

        await self._stream_store.put(
            key=stream_id,
            value=StreamEventList(event_ids=event_ids),
            ttl=self._ttl,
        )

        return event_id

    async def replay_events_after(
        self,
        last_event_id: EventId,
        send_callback: EventCallback,
    ) -> StreamId | None:
        """Replay events that occurred after the specified event ID.

        Args:
            last_event_id: The ID of the last event the client received
            send_callback: A callback function to send events to the client

        Returns:
            The stream ID of the replayed events, or None if the event ID was not found
        """
        # Look up the event to find its stream
        entry = await self._event_store.get(key=last_event_id)
        if not entry:
            logger.warning(f"Event ID {last_event_id} not found in store")
            return None

        stream_id = entry.stream_id
        stream_data = await self._stream_store.get(key=stream_id)
        if not stream_data:
            logger.warning(f"Stream {stream_id} not found in store")
            return None

        event_ids = stream_data.event_ids

        # Locate the client's last-seen event in the index; everything after
        # it gets replayed. It may have been trimmed out in the meantime.
        try:
            start_idx = event_ids.index(last_event_id) + 1
        except ValueError:
            logger.warning(f"Event ID {last_event_id} not found in stream {stream_id}")
            return None

        # Replay events after the last one. Entries can be missing (TTL
        # expiry) or have no message (priming events); both are skipped.
        for event_id in event_ids[start_idx:]:
            event = await self._event_store.get(key=event_id)
            if event and event.message:
                msg = JSONRPCMessage.model_validate(event.message)
                await send_callback(EventMessage(msg, event.event_id))

        return stream_id
0 commit comments