import json
import time
import os
from typing import List, Dict, Any, Union, Optional
from enum import Enum
from dataclasses import asdict

import redis.asyncio as redis

from .user_message import ChatInteraction
from .status_update import StatusUpdate


class EventType(str, Enum):
    """Event types for the chat event stream"""
    CHAT_INTERACTION = "chat_interaction"
    STATUS_UPDATE = "status_update"


class EventStreamManager:
    """
    Manages a Redis list-based event stream for chat conversations.

    Uses RPUSH for O(1) appends and LRANGE for efficient range queries.
    Each event has an implicit sequence number based on its position in the list.
    """

    def __init__(self, redis_host: Optional[str] = None, redis_port: Optional[int] = None):
        self.redis_host = redis_host or os.getenv("REDIS_HOST", "localhost")
        self.redis_port = redis_port or int(os.getenv("REDIS_PORT", "6379"))
        self.redis_client = redis.Redis(
            host=self.redis_host,
            port=self.redis_port,
            decode_responses=True
        )

    def _get_stream_key(self, workflow_id: str) -> str:
        """Get the Redis key for the event stream"""
        return f"events:{workflow_id}"

    def _get_meta_key(self, workflow_id: str) -> str:
        """Get the Redis key for stream metadata"""
        return f"events:{workflow_id}:meta"

    async def append_chat_interaction(
        self,
        workflow_id: str,
        chat_interaction: ChatInteraction
    ) -> int:
        """
        Append a chat interaction to the stream.

        Returns the new total length of the event stream.
        """
        return await self._append_domain_event(
            workflow_id,
            EventType.CHAT_INTERACTION,
            chat_interaction
        )

    async def append_status_update(
        self,
        workflow_id: str,
        status_update: StatusUpdate
    ) -> int:
        """
        Append a status update to the stream.

        Returns the new total length of the event stream.
        """
        return await self._append_domain_event(
            workflow_id,
            EventType.STATUS_UPDATE,
            status_update
        )

    async def _append_domain_event(
        self,
        workflow_id: str,
        event_type: EventType,
        domain_object: Union[ChatInteraction, StatusUpdate]
    ) -> int:
        """
        Internal method to append domain objects to the stream.

        Returns the new total length of the event stream.
        """
        stream_key = self._get_stream_key(workflow_id)

        # Convert domain object to dict
        content_dict = asdict(domain_object)

        # Build the event with structured content
        event = {
            "type": event_type.value,
            "content": content_dict
        }
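        # Illustrative example of what ends up stored in Redis; the fields inside
        # "content" come from the ChatInteraction / StatusUpdate dataclasses, so the
        # ones shown here are hypothetical:
        #   {"type": "chat_interaction", "content": {"message": "...", "timestamp": 1700000000.0}}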

        event_json = json.dumps(event)

        # Use RPUSH to add to the end (chronological order)
        # RPUSH returns the new length of the list after insertion
        new_length = await self.redis_client.rpush(stream_key, event_json)

        return new_length

    async def get_events_from_index(
        self,
        workflow_id: str,
        from_index: int = 0
    ) -> List[Dict[str, Any]]:
        """
        Get events starting from a specific index.

        Args:
            workflow_id: The workflow ID
            from_index: Start from this index (0-based)

        Returns:
            List of events in chronological order
        """
        stream_key = self._get_stream_key(workflow_id)

        # Get all events from the specified index to the end
        event_strings = await self.redis_client.lrange(stream_key, from_index, -1)

        # Parse events
        events = []
        for event_str in event_strings:
            try:
                event = json.loads(event_str)
                events.append(event)
            except json.JSONDecodeError:
                continue  # Skip malformed events

        return events

    async def get_all_events(self, workflow_id: str) -> List[Dict[str, Any]]:
        """
        Get all events in the stream.

        Returns events in chronological order.
        """
        stream_key = self._get_stream_key(workflow_id)

        # Get all events
        event_strings = await self.redis_client.lrange(stream_key, 0, -1)

        # Parse events (already in chronological order due to RPUSH)
        events = []
        for event_str in event_strings:
            try:
                events.append(json.loads(event_str))
            except json.JSONDecodeError:
                continue  # Skip malformed events

        return events

    async def get_total_events(self, workflow_id: str) -> int:
        """Get the total number of events in the stream"""
        stream_key = self._get_stream_key(workflow_id)
        return await self.redis_client.llen(stream_key)

    async def delete_stream(self, workflow_id: str) -> bool:
        """
        Delete the entire event stream for a workflow.

        Returns True if the stream was deleted, False if it didn't exist.
        """
        stream_key = self._get_stream_key(workflow_id)
        meta_key = self._get_meta_key(workflow_id)

        # Delete both the stream and metadata
        deleted = await self.redis_client.delete(stream_key, meta_key)

        return deleted > 0

    async def close(self):
        """Close the Redis connection"""
        await self.redis_client.aclose()
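

# Minimal usage sketch, for illustration only. It assumes a reachable Redis instance
# and that the caller already has a ChatInteraction (its fields live in user_message.py
# and are not shown here); the workflow id below is arbitrary.
async def _example_usage(interaction: ChatInteraction) -> None:
    manager = EventStreamManager()
    try:
        # Append the event; RPUSH reports the new list length, so the event we just
        # wrote sits at index (length - 1).
        length = await manager.append_chat_interaction("workflow-123", interaction)

        # Incremental read: fetch only events at or after a known index.
        latest = await manager.get_events_from_index("workflow-123", from_index=length - 1)
        print(latest[0]["type"])  # "chat_interaction"

        # Total count mirrors the length reported by the append call.
        print(await manager.get_total_events("workflow-123"))
    finally:
        await manager.close()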