diff --git a/pyproject.toml b/pyproject.toml
index 1cd4d683d..cf09a589f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -38,6 +38,9 @@ voice = ["numpy>=2.2.0, <3; python_version>='3.10'", "websockets>=15.0, <16"]
 viz = ["graphviz>=0.17"]
 litellm = ["litellm>=1.67.4.post1, <2"]
 realtime = ["websockets>=15.0, <16"]
+pymongo = [
+    "pymongo>=4.13.2",
+]
 
 [dependency-groups]
 dev = [
diff --git a/src/agents/memory/__init__.py b/src/agents/memory/__init__.py
index 059ca57ab..964f30eea 100644
--- a/src/agents/memory/__init__.py
+++ b/src/agents/memory/__init__.py
@@ -1,3 +1,4 @@
-from .session import Session, SQLiteSession
+from .session import Session
+from .sqlite_session import SQLiteSession
 
 __all__ = ["Session", "SQLiteSession"]
diff --git a/src/agents/memory/mongodb_session.py b/src/agents/memory/mongodb_session.py
new file mode 100644
index 000000000..71c8e41a3
--- /dev/null
+++ b/src/agents/memory/mongodb_session.py
@@ -0,0 +1,176 @@
+from __future__ import annotations
+
+import json
+from datetime import UTC, datetime
+from typing import TYPE_CHECKING, Any
+
+from .session_abc import SessionABC
+
+try:
+    from pymongo import ASCENDING, DESCENDING, AsyncMongoClient
+    from pymongo.asynchronous.database import AsyncDatabase
+except ImportError as _e:
+    raise ImportError(
+        "`pymongo` is required to use the MongoDBSession. You can install it via the optional "
+        "dependency group: `pip install 'openai-agents[pymongo]'`."
+    ) from _e
+
+if TYPE_CHECKING:
+    from agents.items import TResponseInputItem
+
+
+class MongoDBSession(SessionABC):
+    """MongoDB-based implementation of session storage."""
+
+    _initialized = False
+
+    def __init__(
+        self,
+        session_id: str,
+        db: AsyncDatabase,
+        sessions_table: str = "agent_sessions",
+        messages_table: str = "agent_messages",
+    ):
+        self.session_id = session_id
+        self.db = db
+        self.sessions_table = sessions_table
+        self.messages_table = messages_table
+        self.sessions_collection = db[sessions_table]
+        self.messages_collection = db[messages_table]
+
+    @classmethod
+    def from_connection_string(
+        cls,
+        session_id: str,
+        conn_str: str,
+        db_name: str,
+        sessions_table: str = "agent_sessions",
+        messages_table: str = "agent_messages",
+        **kwargs: Any,
+    ) -> MongoDBSession:
+        client = AsyncMongoClient(conn_str, **kwargs)
+        db = client[db_name]
+        return cls(session_id, db, sessions_table, messages_table)
+
+    async def _init_db(self) -> None:
+        """Initialize the database collections."""
+        collection_names = await self.db.list_collection_names()
+
+        if self.sessions_table not in collection_names:
+            await self.db.create_collection(self.sessions_table)
+            await self.sessions_collection.create_index("session_id", unique=True)
+
+        if self.messages_table not in collection_names:
+            await self.db.create_collection(self.messages_table)
+            await self.messages_collection.create_index(["session_id", "created_at"])
+
+    async def _ensure_initialized(self) -> None:
+        """Ensure the database schema is initialized."""
+        if not self._initialized:
+            await self._init_db()
+            self._initialized = True
+
+    async def get_items(self, limit: int | None = None) -> list[TResponseInputItem]:
+        """Retrieve the conversation history for this session.
+
+        Args:
+            limit: Maximum number of items to retrieve. If None, retrieves all items.
+                When specified, returns the latest N items in chronological order.
+
+        Returns:
+            List of input items representing the conversation history
+        """
+        await self._ensure_initialized()
+
+        if limit is None:
+            # Fetch all items for this session in chronological order
+            documents = self.messages_collection.find(
+                {"session_id": self.session_id}, {"_id": False, "message_data": True}
+            ).sort("created_at", ASCENDING)
+        else:
+            # Fetch the latest N items for this session, newest first
+            documents = (
+                self.messages_collection.find(
+                    {"session_id": self.session_id}, {"_id": False, "message_data": True}
+                )
+                .sort("created_at", DESCENDING)
+                .limit(limit)
+            )
+
+        items = []
+        async for doc in documents:
+            try:
+                item = json.loads(doc["message_data"])
+                items.append(item)
+            except json.JSONDecodeError:
+                # Skip invalid JSON entries
+                continue
+
+        # Reverse to get chronological order when the newest-first query was used
+        if limit is not None:
+            items.reverse()
+
+        return items
+
+    async def add_items(self, items: list[TResponseInputItem]) -> None:
+        """Add new items to the conversation history.
+
+        Args:
+            items: List of input items to add to the history
+        """
+        if not items:
+            return
+
+        await self._ensure_initialized()
+
+        existing_session_entity = await self.sessions_collection.find_one(
+            {"session_id": self.session_id}
+        )
+        if not existing_session_entity:
+            await self.sessions_collection.insert_one(
+                {
+                    "session_id": self.session_id,
+                    "created_at": datetime.now(UTC),
+                    "updated_at": datetime.now(UTC),
+                }
+            )
+
+        message_data = [
+            {
+                "session_id": self.session_id,
+                "message_data": json.dumps(item),
+                "created_at": datetime.now(UTC),
+            }
+            for item in items
+        ]
+
+        await self.messages_collection.insert_many(message_data)
+
+        await self.sessions_collection.update_one(
+            {"session_id": self.session_id}, {"$set": {"updated_at": datetime.now(UTC)}}
+        )
+
+    async def pop_item(self) -> TResponseInputItem | None:
+        """Remove and return the most recent item from the session.
+
+        Returns:
+            The most recent item if it exists, None if the session is empty
+        """
+        await self._ensure_initialized()
+
+        last_message = await self.messages_collection.find_one(
+            {"session_id": self.session_id}, sort=[("created_at", DESCENDING)]
+        )
+
+        if last_message:
+            await self.messages_collection.delete_one({"_id": last_message["_id"]})
+            message_data = last_message["message_data"]
+            try:
+                item = json.loads(message_data)
+                return item
+            except json.JSONDecodeError:
+                # Return None for corrupted JSON entries (already deleted)
+                return None
+
+        return None
+
+    async def clear_session(self) -> None:
+        """Clear all items for this session."""
+        await self._ensure_initialized()
+
+        await self.messages_collection.delete_many({"session_id": self.session_id})
+        await self.sessions_collection.delete_one({"session_id": self.session_id})
+
+    async def close(self) -> None:
+        """Close the underlying MongoDB client."""
+        await self.db.client.close()
+
+    async def __aenter__(self):
+        """Async context manager entry."""
+        return self
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        """Async context manager exit."""
+        await self.close()
diff --git a/src/agents/memory/session.py b/src/agents/memory/session.py
index 8db0971eb..57f54dd6b 100644
--- a/src/agents/memory/session.py
+++ b/src/agents/memory/session.py
@@ -1,11 +1,5 @@
 from __future__ import annotations
 
-import asyncio
-import json
-import sqlite3
-import threading
-from abc import ABC, abstractmethod
-from pathlib import Path
 from typing import TYPE_CHECKING, Protocol, runtime_checkable
 
 if TYPE_CHECKING:
@@ -53,317 +47,3 @@ async def pop_item(self) -> TResponseInputItem | None:
 
     async def clear_session(self) -> None:
         """Clear all items for this session."""
         ...
-
-
-class SessionABC(ABC):
-    """Abstract base class for session implementations.
-
-    Session stores conversation history for a specific session, allowing
-    agents to maintain context without requiring explicit manual memory management.
-
-    This ABC is intended for internal use and as a base class for concrete implementations.
-    Third-party libraries should implement the Session protocol instead.
- """ - - session_id: str - - @abstractmethod - async def get_items(self, limit: int | None = None) -> list[TResponseInputItem]: - """Retrieve the conversation history for this session. - - Args: - limit: Maximum number of items to retrieve. If None, retrieves all items. - When specified, returns the latest N items in chronological order. - - Returns: - List of input items representing the conversation history - """ - ... - - @abstractmethod - async def add_items(self, items: list[TResponseInputItem]) -> None: - """Add new items to the conversation history. - - Args: - items: List of input items to add to the history - """ - ... - - @abstractmethod - async def pop_item(self) -> TResponseInputItem | None: - """Remove and return the most recent item from the session. - - Returns: - The most recent item if it exists, None if the session is empty - """ - ... - - @abstractmethod - async def clear_session(self) -> None: - """Clear all items for this session.""" - ... - - -class SQLiteSession(SessionABC): - """SQLite-based implementation of session storage. - - This implementation stores conversation history in a SQLite database. - By default, uses an in-memory database that is lost when the process ends. - For persistent storage, provide a file path. - """ - - def __init__( - self, - session_id: str, - db_path: str | Path = ":memory:", - sessions_table: str = "agent_sessions", - messages_table: str = "agent_messages", - ): - """Initialize the SQLite session. - - Args: - session_id: Unique identifier for the conversation session - db_path: Path to the SQLite database file. Defaults to ':memory:' (in-memory database) - sessions_table: Name of the table to store session metadata. Defaults to - 'agent_sessions' - messages_table: Name of the table to store message data. 
Defaults to 'agent_messages' - """ - self.session_id = session_id - self.db_path = db_path - self.sessions_table = sessions_table - self.messages_table = messages_table - self._local = threading.local() - self._lock = threading.Lock() - - # For in-memory databases, we need a shared connection to avoid thread isolation - # For file databases, we use thread-local connections for better concurrency - self._is_memory_db = str(db_path) == ":memory:" - if self._is_memory_db: - self._shared_connection = sqlite3.connect(":memory:", check_same_thread=False) - self._shared_connection.execute("PRAGMA journal_mode=WAL") - self._init_db_for_connection(self._shared_connection) - else: - # For file databases, initialize the schema once since it persists - init_conn = sqlite3.connect(str(self.db_path), check_same_thread=False) - init_conn.execute("PRAGMA journal_mode=WAL") - self._init_db_for_connection(init_conn) - init_conn.close() - - def _get_connection(self) -> sqlite3.Connection: - """Get a database connection.""" - if self._is_memory_db: - # Use shared connection for in-memory database to avoid thread isolation - return self._shared_connection - else: - # Use thread-local connections for file databases - if not hasattr(self._local, "connection"): - self._local.connection = sqlite3.connect( - str(self.db_path), - check_same_thread=False, - ) - self._local.connection.execute("PRAGMA journal_mode=WAL") - assert isinstance(self._local.connection, sqlite3.Connection), ( - f"Expected sqlite3.Connection, got {type(self._local.connection)}" - ) - return self._local.connection - - def _init_db_for_connection(self, conn: sqlite3.Connection) -> None: - """Initialize the database schema for a specific connection.""" - conn.execute( - f""" - CREATE TABLE IF NOT EXISTS {self.sessions_table} ( - session_id TEXT PRIMARY KEY, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP - ) - """ - ) - - conn.execute( - f""" - CREATE TABLE IF NOT EXISTS {self.messages_table} ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - session_id TEXT NOT NULL, - message_data TEXT NOT NULL, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - FOREIGN KEY (session_id) REFERENCES {self.sessions_table} (session_id) - ON DELETE CASCADE - ) - """ - ) - - conn.execute( - f""" - CREATE INDEX IF NOT EXISTS idx_{self.messages_table}_session_id - ON {self.messages_table} (session_id, created_at) - """ - ) - - conn.commit() - - async def get_items(self, limit: int | None = None) -> list[TResponseInputItem]: - """Retrieve the conversation history for this session. - - Args: - limit: Maximum number of items to retrieve. If None, retrieves all items. - When specified, returns the latest N items in chronological order. - - Returns: - List of input items representing the conversation history - """ - - def _get_items_sync(): - conn = self._get_connection() - with self._lock if self._is_memory_db else threading.Lock(): - if limit is None: - # Fetch all items in chronological order - cursor = conn.execute( - f""" - SELECT message_data FROM {self.messages_table} - WHERE session_id = ? - ORDER BY created_at ASC - """, - (self.session_id,), - ) - else: - # Fetch the latest N items in chronological order - cursor = conn.execute( - f""" - SELECT message_data FROM {self.messages_table} - WHERE session_id = ? - ORDER BY created_at DESC - LIMIT ? 
- """, - (self.session_id, limit), - ) - - rows = cursor.fetchall() - - # Reverse to get chronological order when using DESC - if limit is not None: - rows = list(reversed(rows)) - - items = [] - for (message_data,) in rows: - try: - item = json.loads(message_data) - items.append(item) - except json.JSONDecodeError: - # Skip invalid JSON entries - continue - - return items - - return await asyncio.to_thread(_get_items_sync) - - async def add_items(self, items: list[TResponseInputItem]) -> None: - """Add new items to the conversation history. - - Args: - items: List of input items to add to the history - """ - if not items: - return - - def _add_items_sync(): - conn = self._get_connection() - - with self._lock if self._is_memory_db else threading.Lock(): - # Ensure session exists - conn.execute( - f""" - INSERT OR IGNORE INTO {self.sessions_table} (session_id) VALUES (?) - """, - (self.session_id,), - ) - - # Add items - message_data = [(self.session_id, json.dumps(item)) for item in items] - conn.executemany( - f""" - INSERT INTO {self.messages_table} (session_id, message_data) VALUES (?, ?) - """, - message_data, - ) - - # Update session timestamp - conn.execute( - f""" - UPDATE {self.sessions_table} - SET updated_at = CURRENT_TIMESTAMP - WHERE session_id = ? - """, - (self.session_id,), - ) - - conn.commit() - - await asyncio.to_thread(_add_items_sync) - - async def pop_item(self) -> TResponseInputItem | None: - """Remove and return the most recent item from the session. - - Returns: - The most recent item if it exists, None if the session is empty - """ - - def _pop_item_sync(): - conn = self._get_connection() - with self._lock if self._is_memory_db else threading.Lock(): - # Use DELETE with RETURNING to atomically delete and return the most recent item - cursor = conn.execute( - f""" - DELETE FROM {self.messages_table} - WHERE id = ( - SELECT id FROM {self.messages_table} - WHERE session_id = ? - ORDER BY created_at DESC - LIMIT 1 - ) - RETURNING message_data - """, - (self.session_id,), - ) - - result = cursor.fetchone() - conn.commit() - - if result: - message_data = result[0] - try: - item = json.loads(message_data) - return item - except json.JSONDecodeError: - # Return None for corrupted JSON entries (already deleted) - return None - - return None - - return await asyncio.to_thread(_pop_item_sync) - - async def clear_session(self) -> None: - """Clear all items for this session.""" - - def _clear_session_sync(): - conn = self._get_connection() - with self._lock if self._is_memory_db else threading.Lock(): - conn.execute( - f"DELETE FROM {self.messages_table} WHERE session_id = ?", - (self.session_id,), - ) - conn.execute( - f"DELETE FROM {self.sessions_table} WHERE session_id = ?", - (self.session_id,), - ) - conn.commit() - - await asyncio.to_thread(_clear_session_sync) - - def close(self) -> None: - """Close the database connection.""" - if self._is_memory_db: - if hasattr(self, "_shared_connection"): - self._shared_connection.close() - else: - if hasattr(self._local, "connection"): - self._local.connection.close() diff --git a/src/agents/memory/session_abc.py b/src/agents/memory/session_abc.py new file mode 100644 index 000000000..4107f4ca6 --- /dev/null +++ b/src/agents/memory/session_abc.py @@ -0,0 +1,54 @@ +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ..items import TResponseInputItem + + +class SessionABC(ABC): + """Abstract base class for session implementations. 
+
+    Session stores conversation history for a specific session, allowing
+    agents to maintain context without requiring explicit manual memory management.
+
+    This ABC is intended for internal use and as a base class for concrete implementations.
+    Third-party libraries should implement the Session protocol instead.
+    """
+
+    session_id: str
+
+    @abstractmethod
+    async def get_items(self, limit: int | None = None) -> list[TResponseInputItem]:
+        """Retrieve the conversation history for this session.
+
+        Args:
+            limit: Maximum number of items to retrieve. If None, retrieves all items.
+                When specified, returns the latest N items in chronological order.
+
+        Returns:
+            List of input items representing the conversation history
+        """
+        ...
+
+    @abstractmethod
+    async def add_items(self, items: list[TResponseInputItem]) -> None:
+        """Add new items to the conversation history.
+
+        Args:
+            items: List of input items to add to the history
+        """
+        ...
+
+    @abstractmethod
+    async def pop_item(self) -> TResponseInputItem | None:
+        """Remove and return the most recent item from the session.
+
+        Returns:
+            The most recent item if it exists, None if the session is empty
+        """
+        ...
+
+    @abstractmethod
+    async def clear_session(self) -> None:
+        """Clear all items for this session."""
+        ...
diff --git a/src/agents/memory/sqlite_session.py b/src/agents/memory/sqlite_session.py
new file mode 100644
index 000000000..6a70c4e1a
--- /dev/null
+++ b/src/agents/memory/sqlite_session.py
@@ -0,0 +1,276 @@
+from __future__ import annotations
+
+import asyncio
+import json
+import sqlite3
+import threading
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from ..items import TResponseInputItem
+
+from .session_abc import SessionABC
+
+
+class SQLiteSession(SessionABC):
+    """SQLite-based implementation of session storage.
+
+    This implementation stores conversation history in a SQLite database.
+    By default, uses an in-memory database that is lost when the process ends.
+    For persistent storage, provide a file path.
+    """
+
+    def __init__(
+        self,
+        session_id: str,
+        db_path: str | Path = ":memory:",
+        sessions_table: str = "agent_sessions",
+        messages_table: str = "agent_messages",
+    ):
+        """Initialize the SQLite session.
+
+        Args:
+            session_id: Unique identifier for the conversation session
+            db_path: Path to the SQLite database file. Defaults to ':memory:' (in-memory database)
+            sessions_table: Name of the table to store session metadata. Defaults to
+                'agent_sessions'
+            messages_table: Name of the table to store message data.
Defaults to 'agent_messages' + """ + self.session_id = session_id + self.db_path = db_path + self.sessions_table = sessions_table + self.messages_table = messages_table + self._local = threading.local() + self._lock = threading.Lock() + + # For in-memory databases, we need a shared connection to avoid thread isolation + # For file databases, we use thread-local connections for better concurrency + self._is_memory_db = str(db_path) == ":memory:" + if self._is_memory_db: + self._shared_connection = sqlite3.connect(":memory:", check_same_thread=False) + self._shared_connection.execute("PRAGMA journal_mode=WAL") + self._init_db_for_connection(self._shared_connection) + else: + # For file databases, initialize the schema once since it persists + init_conn = sqlite3.connect(str(self.db_path), check_same_thread=False) + init_conn.execute("PRAGMA journal_mode=WAL") + self._init_db_for_connection(init_conn) + init_conn.close() + + def _get_connection(self) -> sqlite3.Connection: + """Get a database connection.""" + if self._is_memory_db: + # Use shared connection for in-memory database to avoid thread isolation + return self._shared_connection + else: + # Use thread-local connections for file databases + if not hasattr(self._local, "connection"): + self._local.connection = sqlite3.connect( + str(self.db_path), + check_same_thread=False, + ) + self._local.connection.execute("PRAGMA journal_mode=WAL") + assert isinstance(self._local.connection, sqlite3.Connection), ( + f"Expected sqlite3.Connection, got {type(self._local.connection)}" + ) + return self._local.connection + + def _init_db_for_connection(self, conn: sqlite3.Connection) -> None: + """Initialize the database schema for a specific connection.""" + conn.execute( + f""" + CREATE TABLE IF NOT EXISTS {self.sessions_table} ( + session_id TEXT PRIMARY KEY, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """ + ) + + conn.execute( + f""" + CREATE TABLE IF NOT EXISTS {self.messages_table} ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + session_id TEXT NOT NULL, + message_data TEXT NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (session_id) REFERENCES {self.sessions_table} (session_id) + ON DELETE CASCADE + ) + """ + ) + + conn.execute( + f""" + CREATE INDEX IF NOT EXISTS idx_{self.messages_table}_session_id + ON {self.messages_table} (session_id, created_at) + """ + ) + + conn.commit() + + async def get_items(self, limit: int | None = None) -> list[TResponseInputItem]: + """Retrieve the conversation history for this session. + + Args: + limit: Maximum number of items to retrieve. If None, retrieves all items. + When specified, returns the latest N items in chronological order. + + Returns: + List of input items representing the conversation history + """ + + def _get_items_sync(): + conn = self._get_connection() + with self._lock if self._is_memory_db else threading.Lock(): + if limit is None: + # Fetch all items in chronological order + cursor = conn.execute( + f""" + SELECT message_data FROM {self.messages_table} + WHERE session_id = ? + ORDER BY created_at ASC + """, + (self.session_id,), + ) + else: + # Fetch the latest N items in chronological order + cursor = conn.execute( + f""" + SELECT message_data FROM {self.messages_table} + WHERE session_id = ? + ORDER BY created_at DESC + LIMIT ? 
+ """, + (self.session_id, limit), + ) + + rows = cursor.fetchall() + + # Reverse to get chronological order when using DESC + if limit is not None: + rows = list(reversed(rows)) + + items = [] + for (message_data,) in rows: + try: + item = json.loads(message_data) + items.append(item) + except json.JSONDecodeError: + # Skip invalid JSON entries + continue + + return items + + return await asyncio.to_thread(_get_items_sync) + + async def add_items(self, items: list[TResponseInputItem]) -> None: + """Add new items to the conversation history. + + Args: + items: List of input items to add to the history + """ + if not items: + return + + def _add_items_sync(): + conn = self._get_connection() + + with self._lock if self._is_memory_db else threading.Lock(): + # Ensure session exists + conn.execute( + f""" + INSERT OR IGNORE INTO {self.sessions_table} (session_id) VALUES (?) + """, + (self.session_id,), + ) + + # Add items + message_data = [(self.session_id, json.dumps(item)) for item in items] + conn.executemany( + f""" + INSERT INTO {self.messages_table} (session_id, message_data) VALUES (?, ?) + """, + message_data, + ) + + # Update session timestamp + conn.execute( + f""" + UPDATE {self.sessions_table} + SET updated_at = CURRENT_TIMESTAMP + WHERE session_id = ? + """, + (self.session_id,), + ) + + conn.commit() + + await asyncio.to_thread(_add_items_sync) + + async def pop_item(self) -> TResponseInputItem | None: + """Remove and return the most recent item from the session. + + Returns: + The most recent item if it exists, None if the session is empty + """ + + def _pop_item_sync(): + conn = self._get_connection() + with self._lock if self._is_memory_db else threading.Lock(): + # Use DELETE with RETURNING to atomically delete and return the most recent item + cursor = conn.execute( + f""" + DELETE FROM {self.messages_table} + WHERE id = ( + SELECT id FROM {self.messages_table} + WHERE session_id = ? 
+ ORDER BY created_at DESC + LIMIT 1 + ) + RETURNING message_data + """, + (self.session_id,), + ) + + result = cursor.fetchone() + conn.commit() + + if result: + message_data = result[0] + try: + item = json.loads(message_data) + return item + except json.JSONDecodeError: + # Return None for corrupted JSON entries (already deleted) + return None + + return None + + return await asyncio.to_thread(_pop_item_sync) + + async def clear_session(self) -> None: + """Clear all items for this session.""" + + def _clear_session_sync(): + conn = self._get_connection() + with self._lock if self._is_memory_db else threading.Lock(): + conn.execute( + f"DELETE FROM {self.messages_table} WHERE session_id = ?", + (self.session_id,), + ) + conn.execute( + f"DELETE FROM {self.sessions_table} WHERE session_id = ?", + (self.session_id,), + ) + conn.commit() + + await asyncio.to_thread(_clear_session_sync) + + def close(self) -> None: + """Close the database connection.""" + if self._is_memory_db: + if hasattr(self, "_shared_connection"): + self._shared_connection.close() + else: + if hasattr(self._local, "connection"): + self._local.connection.close() diff --git a/uv.lock b/uv.lock index e4945864d..a30cf9a5a 100644 --- a/uv.lock +++ b/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 2 +revision = 3 requires-python = ">=3.9" resolution-markers = [ "python_full_version >= '3.10'", @@ -450,6 +450,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, ] +[[package]] +name = "dnspython" +version = "2.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197, upload-time = "2024-10-05T20:14:59.362Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632, upload-time = "2024-10-05T20:14:57.687Z" }, +] + [[package]] name = "eval-type-backport" version = "0.2.2" @@ -1498,6 +1507,9 @@ dependencies = [ litellm = [ { name = "litellm" }, ] +pymongo = [ + { name = "pymongo" }, +] realtime = [ { name = "websockets" }, ] @@ -1543,13 +1555,14 @@ requires-dist = [ { name = "numpy", marker = "python_full_version >= '3.10' and extra == 'voice'", specifier = ">=2.2.0,<3" }, { name = "openai", specifier = ">=1.97.1,<2" }, { name = "pydantic", specifier = ">=2.10,<3" }, + { name = "pymongo", marker = "extra == 'pymongo'", specifier = ">=4.13.2" }, { name = "requests", specifier = ">=2.0,<3" }, { name = "types-requests", specifier = ">=2.0,<3" }, { name = "typing-extensions", specifier = ">=4.12.2,<5" }, { name = "websockets", marker = "extra == 'realtime'", specifier = ">=15.0,<16" }, { name = "websockets", marker = "extra == 'voice'", specifier = ">=15.0,<16" }, ] -provides-extras = ["voice", "viz", "litellm", "realtime"] +provides-extras = ["voice", "viz", "litellm", "realtime", "pymongo"] [package.metadata.requires-dev] dev = [ @@ -1925,6 +1938,73 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/eb/f5/b9e2a42aa8f9e34d52d66de87941ecd236570c7ed2e87775ed23bbe4e224/pymdown_extensions-10.14.3-py3-none-any.whl", hash = "sha256:05e0bee73d64b9c71a4ae17c72abc2f700e8bc8403755a00580b49a4e9f189e9", size = 264467, upload-time = "2025-02-01T15:43:13.995Z" }, ] +[[package]] +name = "pymongo" +version = "4.13.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dnspython" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4b/5a/d664298bf54762f0c89b8aa2c276868070e06afb853b4a8837de5741e5f9/pymongo-4.13.2.tar.gz", hash = "sha256:0f64c6469c2362962e6ce97258ae1391abba1566a953a492562d2924b44815c2", size = 2167844, upload-time = "2025-06-16T18:16:30.685Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/df/a8/293dfd3accda06ae94c54e7c15ac5108614d31263708236b4743554ad6ee/pymongo-4.13.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:01065eb1838e3621a30045ab14d1a60ee62e01f65b7cf154e69c5c722ef14d2f", size = 802768, upload-time = "2025-06-16T18:14:39.521Z" }, + { url = "https://files.pythonhosted.org/packages/ce/7f/2cbc897dd2867b9b5f8e9e6587dc4bf23e3777a4ddd712064ed21aea99e0/pymongo-4.13.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ab0325d436075f5f1901cde95afae811141d162bc42d9a5befb647fda585ae6", size = 803053, upload-time = "2025-06-16T18:14:43.318Z" }, + { url = "https://files.pythonhosted.org/packages/b6/da/07cdbaf507cccfdac837f612ea276523d2cdd380c5253c86ceae0369f0e2/pymongo-4.13.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdd8041902963c84dc4e27034fa045ac55fabcb2a4ba5b68b880678557573e70", size = 1180427, upload-time = "2025-06-16T18:14:44.841Z" }, + { url = "https://files.pythonhosted.org/packages/2b/5c/5f61269c87e565a6f4016e644e2bd20473b4b5a47c362ad3d57a1428ef33/pymongo-4.13.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b00ab04630aa4af97294e9abdbe0506242396269619c26f5761fd7b2524ef501", size = 1214655, upload-time = "2025-06-16T18:14:46.635Z" }, + { url = "https://files.pythonhosted.org/packages/26/51/757ee06299e2bb61c0ae7b886ca845a78310cf94fc95bbc044bbe7892392/pymongo-4.13.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16440d0da30ba804c6c01ea730405fdbbb476eae760588ea09e6e7d28afc06de", size = 1197586, upload-time = "2025-06-16T18:14:48.129Z" }, + { url = "https://files.pythonhosted.org/packages/5a/a8/9ddf0ad0884046c34c5eb3de9a944c47d37e39989ae782ded2b207462a97/pymongo-4.13.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad9a2d1357aed5d6750deb315f62cb6f5b3c4c03ffb650da559cb09cb29e6fe8", size = 1183599, upload-time = "2025-06-16T18:14:49.576Z" }, + { url = "https://files.pythonhosted.org/packages/7b/57/61b289b440e77524e4b0d6881f6c6f50cf9a55a72b5ba2adaa43d70531e6/pymongo-4.13.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c793223aef21a8c415c840af1ca36c55a05d6fa3297378da35de3fb6661c0174", size = 1162761, upload-time = "2025-06-16T18:14:51.558Z" }, + { url = "https://files.pythonhosted.org/packages/05/22/bd328cedc79768ab03942fd828f0cd1d50a3ae2c3caf3aebad65a644eb75/pymongo-4.13.2-cp310-cp310-win32.whl", hash = "sha256:8ef6ae029a3390565a0510c872624514dde350007275ecd8126b09175aa02cca", size = 790062, upload-time = "2025-06-16T18:14:53.024Z" }, + { url = "https://files.pythonhosted.org/packages/9f/70/2d8bbdac28e869cebb8081a43f8b16c6dd2384f6aef28fcc6ec0693a7042/pymongo-4.13.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:66f168f8c5b1e2e3d518507cf9f200f0c86ac79e2b2be9e7b6c8fd1e2f7d7824", size = 800198, upload-time = "2025-06-16T18:14:54.481Z" }, + { url = "https://files.pythonhosted.org/packages/94/df/4c4ef17b48c70120f834ba7151860c300924915696c4a57170cb5b09787f/pymongo-4.13.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7af8c56d0a7fcaf966d5292e951f308fb1f8bac080257349e14742725fd7990d", size = 857145, upload-time = "2025-06-16T18:14:56.516Z" }, + { url = "https://files.pythonhosted.org/packages/e7/41/480ca82b3b3320fc70fe699a01df28db15a4ea154c8759ab4a437a74c808/pymongo-4.13.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ad24f5864706f052b05069a6bc59ff875026e28709548131448fe1e40fc5d80f", size = 857437, upload-time = "2025-06-16T18:14:58.572Z" }, + { url = "https://files.pythonhosted.org/packages/50/d4/eb74e98ea980a5e1ec4f06f383ec6c52ab02076802de24268f477ef616d2/pymongo-4.13.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a10069454195d1d2dda98d681b1dbac9a425f4b0fe744aed5230c734021c1cb9", size = 1426516, upload-time = "2025-06-16T18:15:00.589Z" }, + { url = "https://files.pythonhosted.org/packages/aa/fe/c5960c0e6438bd489367261e5ef1a5db01e34349f0dbf7529fb938d3d2ef/pymongo-4.13.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3e20862b81e3863bcd72334e3577a3107604553b614a8d25ee1bb2caaea4eb90", size = 1477477, upload-time = "2025-06-16T18:15:02.283Z" }, + { url = "https://files.pythonhosted.org/packages/f6/9f/ef4395175fc97876978736c8493d8ffa4d13aa7a4e12269a2cb0d52a1246/pymongo-4.13.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6b4d5794ca408317c985d7acfb346a60f96f85a7c221d512ff0ecb3cce9d6110", size = 1451921, upload-time = "2025-06-16T18:15:04.35Z" }, + { url = "https://files.pythonhosted.org/packages/2a/b9/397cb2a3ec03f880e882102eddcb46c3d516c6cf47a05f44db48067924d9/pymongo-4.13.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c8e0420fb4901006ae7893e76108c2a36a343b4f8922466d51c45e9e2ceb717", size = 1431045, upload-time = "2025-06-16T18:15:06.392Z" }, + { url = "https://files.pythonhosted.org/packages/f5/0d/e150a414e5cb07f2fefca817fa071a6da8d96308469a85a777244c8c4337/pymongo-4.13.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:239b5f83b83008471d54095e145d4c010f534af99e87cc8877fc6827736451a0", size = 1399697, upload-time = "2025-06-16T18:15:08.975Z" }, + { url = "https://files.pythonhosted.org/packages/b8/29/5190eafb994721c30a38a8a62df225c47a9da364ab5c8cffe90aabf6a54e/pymongo-4.13.2-cp311-cp311-win32.whl", hash = "sha256:6bceb524110c32319eb7119422e400dbcafc5b21bcc430d2049a894f69b604e5", size = 836261, upload-time = "2025-06-16T18:15:10.459Z" }, + { url = "https://files.pythonhosted.org/packages/d3/da/30bdcc83b23fc4f2996b39b41b2ff0ff2184230a78617c7b8636aac4d81d/pymongo-4.13.2-cp311-cp311-win_amd64.whl", hash = "sha256:ab87484c97ae837b0a7bbdaa978fa932fbb6acada3f42c3b2bee99121a594715", size = 851451, upload-time = "2025-06-16T18:15:12.181Z" }, + { url = "https://files.pythonhosted.org/packages/03/e0/0e187750e23eed4227282fcf568fdb61f2b53bbcf8cbe3a71dde2a860d12/pymongo-4.13.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ec89516622dfc8b0fdff499612c0bd235aa45eeb176c9e311bcc0af44bf952b6", size = 912004, upload-time = "2025-06-16T18:15:14.299Z" }, + { url = "https://files.pythonhosted.org/packages/57/c2/9b79795382daaf41e5f7379bffdef1880d68160adea352b796d6948cb5be/pymongo-4.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:f30eab4d4326df54fee54f31f93e532dc2918962f733ee8e115b33e6fe151d92", size = 911698, upload-time = "2025-06-16T18:15:16.334Z" }, + { url = "https://files.pythonhosted.org/packages/6f/e4/f04dc9ed5d1d9dbc539dc2d8758dd359c5373b0e06fcf25418b2c366737c/pymongo-4.13.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cce9428d12ba396ea245fc4c51f20228cead01119fcc959e1c80791ea45f820", size = 1690357, upload-time = "2025-06-16T18:15:18.358Z" }, + { url = "https://files.pythonhosted.org/packages/bb/de/41478a7d527d38f1b98b084f4a78bbb805439a6ebd8689fbbee0a3dfacba/pymongo-4.13.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac9241b727a69c39117c12ac1e52d817ea472260dadc66262c3fdca0bab0709b", size = 1754593, upload-time = "2025-06-16T18:15:20.096Z" }, + { url = "https://files.pythonhosted.org/packages/df/d9/8fa2eb110291e154f4312779b1a5b815090b8b05a59ecb4f4a32427db1df/pymongo-4.13.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3efc4c515b371a9fa1d198b6e03340985bfe1a55ae2d2b599a714934e7bc61ab", size = 1723637, upload-time = "2025-06-16T18:15:22.048Z" }, + { url = "https://files.pythonhosted.org/packages/27/7b/9863fa60a4a51ea09f5e3cd6ceb231af804e723671230f2daf3bd1b59c2b/pymongo-4.13.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f57a664aa74610eb7a52fa93f2cf794a1491f4f76098343485dd7da5b3bcff06", size = 1693613, upload-time = "2025-06-16T18:15:24.866Z" }, + { url = "https://files.pythonhosted.org/packages/9b/89/a42efa07820a59089836f409a63c96e7a74e33313e50dc39c554db99ac42/pymongo-4.13.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dcb0b8cdd499636017a53f63ef64cf9b6bd3fd9355796c5a1d228e4be4a4c94", size = 1652745, upload-time = "2025-06-16T18:15:27.078Z" }, + { url = "https://files.pythonhosted.org/packages/6a/cf/2c77d1acda61d281edd3e3f00d5017d3fac0c29042c769efd3b8018cb469/pymongo-4.13.2-cp312-cp312-win32.whl", hash = "sha256:bf43ae07804d7762b509f68e5ec73450bb8824e960b03b861143ce588b41f467", size = 883232, upload-time = "2025-06-16T18:15:29.169Z" }, + { url = "https://files.pythonhosted.org/packages/d2/4f/727f59156e3798850c3c2901f106804053cb0e057ed1bd9883f5fa5aa8fa/pymongo-4.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:812a473d584bcb02ab819d379cd5e752995026a2bb0d7713e78462b6650d3f3a", size = 903304, upload-time = "2025-06-16T18:15:31.346Z" }, + { url = "https://files.pythonhosted.org/packages/e0/95/b44b8e24b161afe7b244f6d43c09a7a1f93308cad04198de1c14c67b24ce/pymongo-4.13.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d6044ca0eb74d97f7d3415264de86a50a401b7b0b136d30705f022f9163c3124", size = 966232, upload-time = "2025-06-16T18:15:33.057Z" }, + { url = "https://files.pythonhosted.org/packages/6d/fc/d4d59799a52033acb187f7bd1f09bc75bebb9fd12cef4ba2964d235ad3f9/pymongo-4.13.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dd326bcb92d28d28a3e7ef0121602bad78691b6d4d1f44b018a4616122f1ba8b", size = 965935, upload-time = "2025-06-16T18:15:34.826Z" }, + { url = "https://files.pythonhosted.org/packages/07/a8/67502899d89b317ea9952e4769bc193ca15efee561b24b38a86c59edde6f/pymongo-4.13.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfb0c21bdd58e58625c9cd8de13e859630c29c9537944ec0a14574fdf88c2ac4", size = 1954070, upload-time = "2025-06-16T18:15:36.576Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/3b/0dac5d81d1af1b96b3200da7ccc52fc261a35efb7d2ac493252eb40a2b11/pymongo-4.13.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9c7d345d57f17b1361008aea78a37e8c139631a46aeb185dd2749850883c7ba", size = 2031424, upload-time = "2025-06-16T18:15:38.723Z" }, + { url = "https://files.pythonhosted.org/packages/31/ed/7a5af49a153224ca7e31e9915703e612ad9c45808cc39540e9dd1a2a7537/pymongo-4.13.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8860445a8da1b1545406fab189dc20319aff5ce28e65442b2b4a8f4228a88478", size = 1995339, upload-time = "2025-06-16T18:15:40.474Z" }, + { url = "https://files.pythonhosted.org/packages/f1/e9/9c72eceae8439c4f1bdebc4e6b290bf035e3f050a80eeb74abb5e12ef8e2/pymongo-4.13.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01c184b612f67d5a4c8f864ae7c40b6cc33c0e9bb05e39d08666f8831d120504", size = 1956066, upload-time = "2025-06-16T18:15:42.272Z" }, + { url = "https://files.pythonhosted.org/packages/ac/79/9b019c47923395d5fced03856996465fb9340854b0f5a2ddf16d47e2437c/pymongo-4.13.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ea8c62d5f3c6529407c12471385d9a05f9fb890ce68d64976340c85cd661b", size = 1905642, upload-time = "2025-06-16T18:15:43.978Z" }, + { url = "https://files.pythonhosted.org/packages/93/2f/ebf56c7fa9298fa2f9716e7b66cf62b29e7fc6e11774f3b87f55d214d466/pymongo-4.13.2-cp313-cp313-win32.whl", hash = "sha256:d13556e91c4a8cb07393b8c8be81e66a11ebc8335a40fa4af02f4d8d3b40c8a1", size = 930184, upload-time = "2025-06-16T18:15:46.899Z" }, + { url = "https://files.pythonhosted.org/packages/76/2f/49c35464cbd5d116d950ff5d24b4b20491aaae115d35d40b945c33b29250/pymongo-4.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:cfc69d7bc4d4d5872fd1e6de25e6a16e2372c7d5556b75c3b8e2204dce73e3fb", size = 955111, upload-time = "2025-06-16T18:15:48.85Z" }, + { url = "https://files.pythonhosted.org/packages/57/56/b17c8b5329b1842b7847cf0fa224ef0a272bf2e5126360f4da8065c855a1/pymongo-4.13.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a457d2ac34c05e9e8a6bb724115b093300bf270f0655fb897df8d8604b2e3700", size = 1022735, upload-time = "2025-06-16T18:15:50.672Z" }, + { url = "https://files.pythonhosted.org/packages/83/e6/66fec65a7919bf5f35be02e131b4dc4bf3152b5e8d78cd04b6d266a44514/pymongo-4.13.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:02f131a6e61559613b1171b53fbe21fed64e71b0cb4858c47fc9bc7c8e0e501c", size = 1022740, upload-time = "2025-06-16T18:15:53.218Z" }, + { url = "https://files.pythonhosted.org/packages/17/92/cda7383df0d5e71dc007f172c1ecae6313d64ea05d82bbba06df7f6b3e49/pymongo-4.13.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c942d1c6334e894271489080404b1a2e3b8bd5de399f2a0c14a77d966be5bc9", size = 2282430, upload-time = "2025-06-16T18:15:55.356Z" }, + { url = "https://files.pythonhosted.org/packages/84/da/285e05eb1d617b30dc7a7a98ebeb264353a8903e0e816a4eec6487c81f18/pymongo-4.13.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:850168d115680ab66a0931a6aa9dd98ed6aa5e9c3b9a6c12128049b9a5721bc5", size = 2369470, upload-time = "2025-06-16T18:15:57.5Z" }, + { url = "https://files.pythonhosted.org/packages/89/c0/c0d5eae236de9ca293497dc58fc1e4872382223c28ec223f76afc701392c/pymongo-4.13.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af7dfff90647ee77c53410f7fe8ca4fe343f8b768f40d2d0f71a5602f7b5a541", size = 
2328857, upload-time = "2025-06-16T18:15:59.59Z" }, + { url = "https://files.pythonhosted.org/packages/2b/5a/d8639fba60def128ce9848b99c56c54c8a4d0cd60342054cd576f0bfdf26/pymongo-4.13.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8057f9bc9c94a8fd54ee4f5e5106e445a8f406aff2df74746f21c8791ee2403", size = 2280053, upload-time = "2025-06-16T18:16:02.166Z" }, + { url = "https://files.pythonhosted.org/packages/a1/69/d56f0897cc4932a336820c5d2470ffed50be04c624b07d1ad6ea75aaa975/pymongo-4.13.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51040e1ba78d6671f8c65b29e2864483451e789ce93b1536de9cc4456ede87fa", size = 2219378, upload-time = "2025-06-16T18:16:04.108Z" }, + { url = "https://files.pythonhosted.org/packages/04/1e/427e7f99801ee318b6331062d682d3816d7e1d6b6013077636bd75d49c87/pymongo-4.13.2-cp313-cp313t-win32.whl", hash = "sha256:7ab86b98a18c8689514a9f8d0ec7d9ad23a949369b31c9a06ce4a45dcbffcc5e", size = 979460, upload-time = "2025-06-16T18:16:06.128Z" }, + { url = "https://files.pythonhosted.org/packages/b5/9c/00301a6df26f0f8d5c5955192892241e803742e7c3da8c2c222efabc0df6/pymongo-4.13.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c38168263ed94a250fc5cf9c6d33adea8ab11c9178994da1c3481c2a49d235f8", size = 1011057, upload-time = "2025-06-16T18:16:07.917Z" }, + { url = "https://files.pythonhosted.org/packages/ab/08/e409aaf371d2aaf11ba2cb3a7a14dd5d3e8a78b8aa9d3472a198bf2d9179/pymongo-4.13.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a89739a86da31adcef41f6c3ae62b38a8bad156bba71fe5898871746c5af83", size = 748386, upload-time = "2025-06-16T18:16:09.657Z" }, + { url = "https://files.pythonhosted.org/packages/13/f3/0a590c94291c9e4e88ff7e9ce51b16fbef82a362681f08e6f173b67b55df/pymongo-4.13.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de529aebd1ddae2de778d926b3e8e2e42a9b37b5c668396aad8f28af75e606f9", size = 748674, upload-time = "2025-06-16T18:16:11.44Z" }, + { url = "https://files.pythonhosted.org/packages/76/fb/03e35ad0a23a6ed8a4707392ed6b25d145692191201940b9001ba627697b/pymongo-4.13.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34cc7d4cd7586c1c4f7af2b97447404046c2d8e7ed4c7214ed0e21dbeb17d57d", size = 936341, upload-time = "2025-06-16T18:16:13.198Z" }, + { url = "https://files.pythonhosted.org/packages/0c/53/1e9c0d642268e641105e64bb7119e97ee213d4f17c376dc2dd29b24ac08b/pymongo-4.13.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:884cb88a9d4c4c9810056b9c71817bd9714bbe58c461f32b65be60c56759823b", size = 953682, upload-time = "2025-06-16T18:16:15.538Z" }, + { url = "https://files.pythonhosted.org/packages/4c/71/f195afd6e1cadbd5ff2abec451b2022f78e9486052388d8dc362a419466b/pymongo-4.13.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:389cb6415ec341c73f81fbf54970ccd0cd5d3fa7c238dcdb072db051d24e2cb4", size = 945348, upload-time = "2025-06-16T18:16:17.292Z" }, + { url = "https://files.pythonhosted.org/packages/7f/e1/c6ab4ee8d78447d6fb24a62f36234cd41476c94cf238a4c87a34ef7d68ba/pymongo-4.13.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49f9968ea7e6a86d4c9bd31d2095f0419efc498ea5e6067e75ade1f9e64aea3d", size = 938240, upload-time = "2025-06-16T18:16:19.119Z" }, + { url = "https://files.pythonhosted.org/packages/d4/61/21628680899f2d72b617a49168b0b9a8c29c7a33e899296f5c71e39e3727/pymongo-4.13.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:ae07315bb106719c678477e61077cd28505bb7d3fd0a2341e75a9510118cb785", size = 927900, upload-time = "2025-06-16T18:16:21.346Z" }, + { url = "https://files.pythonhosted.org/packages/27/3f/97d23f419a3ee72da3c29d9d41519e03bfcc619d8d2770630006138031be/pymongo-4.13.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4dc60b3f5e1448fd011c729ad5d8735f603b0a08a8773ec8e34a876ccc7de45f", size = 911155, upload-time = "2025-06-16T18:16:23.31Z" }, + { url = "https://files.pythonhosted.org/packages/65/a1/83990c9d6ea16937009b7af2ce705ee8c5d4f1e740c8ecd73ac8950e00bb/pymongo-4.13.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:75462d6ce34fb2dd98f8ac3732a7a1a1fbb2e293c4f6e615766731d044ad730e", size = 937562, upload-time = "2025-06-16T18:16:25.17Z" }, + { url = "https://files.pythonhosted.org/packages/48/73/e93ce27900ebfb19ecd987eef9d8d56b86ce065002504fd0c6958af93197/pymongo-4.13.2-cp39-cp39-win32.whl", hash = "sha256:b7e04c45f6a7d5a13fe064f42130d29b0730cb83dd387a623563ff3b9bd2f4d1", size = 743873, upload-time = "2025-06-16T18:16:26.938Z" }, + { url = "https://files.pythonhosted.org/packages/7e/a9/3abc30b68df1ea2e0ab5d5245039c0c062d0a411b0e327bd986370deff05/pymongo-4.13.2-cp39-cp39-win_amd64.whl", hash = "sha256:0603145c9be5e195ae61ba7a93eb283abafdbd87f6f30e6c2dfc242940fe280c", size = 748952, upload-time = "2025-06-16T18:16:28.759Z" }, +] + [[package]] name = "pynput" version = "1.8.1"