diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e83f1f8d4..f53f270c0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,7 +17,7 @@ repos: - id: mixed-line-ending - id: trailing-whitespace - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: "v0.12.8" + rev: "v0.12.9" hooks: - id: ruff args: ["--fix"] diff --git a/pyproject.toml b/pyproject.toml index c14a1e587..d4e935600 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,7 +13,7 @@ maintainers = [{ name = "Litestar Developers", email = "hello@litestar.dev" }] name = "sqlspec" readme = "README.md" requires-python = ">=3.9, <4.0" -version = "0.17.1" +version = "0.18.0" [project.urls] Discord = "https://discord.gg/litestar" diff --git a/sqlspec/_sql.py b/sqlspec/_sql.py index 0a393f3fb..10ed99321 100644 --- a/sqlspec/_sql.py +++ b/sqlspec/_sql.py @@ -42,11 +42,11 @@ ) from sqlspec.builder.mixins._join_operations import JoinBuilder from sqlspec.builder.mixins._select_operations import Case, SubqueryBuilder, WindowFunctionBuilder +from sqlspec.core.statement import SQL from sqlspec.exceptions import SQLBuilderError if TYPE_CHECKING: from sqlspec.builder._expression_wrappers import ExpressionWrapper - from sqlspec.core.statement import SQL __all__ = ( @@ -285,9 +285,7 @@ def create_table(self, table_name: str, dialect: DialectType = None) -> "CreateT Returns: CreateTable builder instance """ - builder = CreateTable(table_name) - builder.dialect = dialect or self.dialect - return builder + return CreateTable(table_name, dialect=dialect or self.dialect) def create_table_as_select(self, dialect: DialectType = None) -> "CreateTableAsSelect": """Create a CREATE TABLE AS SELECT builder. @@ -298,35 +296,31 @@ def create_table_as_select(self, dialect: DialectType = None) -> "CreateTableAsS Returns: CreateTableAsSelect builder instance """ - builder = CreateTableAsSelect() - builder.dialect = dialect or self.dialect - return builder + return CreateTableAsSelect(dialect=dialect or self.dialect) - def create_view(self, dialect: DialectType = None) -> "CreateView": + def create_view(self, view_name: str, dialect: DialectType = None) -> "CreateView": """Create a CREATE VIEW builder. Args: + view_name: Name of the view to create dialect: Optional SQL dialect Returns: CreateView builder instance """ - builder = CreateView() - builder.dialect = dialect or self.dialect - return builder + return CreateView(view_name, dialect=dialect or self.dialect) - def create_materialized_view(self, dialect: DialectType = None) -> "CreateMaterializedView": + def create_materialized_view(self, view_name: str, dialect: DialectType = None) -> "CreateMaterializedView": """Create a CREATE MATERIALIZED VIEW builder. Args: + view_name: Name of the materialized view to create dialect: Optional SQL dialect Returns: CreateMaterializedView builder instance """ - builder = CreateMaterializedView() - builder.dialect = dialect or self.dialect - return builder + return CreateMaterializedView(view_name, dialect=dialect or self.dialect) def create_index(self, index_name: str, dialect: DialectType = None) -> "CreateIndex": """Create a CREATE INDEX builder. @@ -340,18 +334,17 @@ def create_index(self, index_name: str, dialect: DialectType = None) -> "CreateI """ return CreateIndex(index_name, dialect=dialect or self.dialect) - def create_schema(self, dialect: DialectType = None) -> "CreateSchema": + def create_schema(self, schema_name: str, dialect: DialectType = None) -> "CreateSchema": """Create a CREATE SCHEMA builder. 
Args: + schema_name: Name of the schema to create dialect: Optional SQL dialect Returns: CreateSchema builder instance """ - builder = CreateSchema() - builder.dialect = dialect or self.dialect - return builder + return CreateSchema(schema_name, dialect=dialect or self.dialect) def drop_table(self, table_name: str, dialect: DialectType = None) -> "DropTable": """Create a DROP TABLE builder. @@ -365,16 +358,17 @@ def drop_table(self, table_name: str, dialect: DialectType = None) -> "DropTable """ return DropTable(table_name, dialect=dialect or self.dialect) - def drop_view(self, dialect: DialectType = None) -> "DropView": + def drop_view(self, view_name: str, dialect: DialectType = None) -> "DropView": """Create a DROP VIEW builder. Args: + view_name: Name of the view to drop dialect: Optional SQL dialect Returns: DropView builder instance """ - return DropView(dialect=dialect or self.dialect) + return DropView(view_name, dialect=dialect or self.dialect) def drop_index(self, index_name: str, dialect: DialectType = None) -> "DropIndex": """Create a DROP INDEX builder. @@ -388,16 +382,17 @@ def drop_index(self, index_name: str, dialect: DialectType = None) -> "DropIndex """ return DropIndex(index_name, dialect=dialect or self.dialect) - def drop_schema(self, dialect: DialectType = None) -> "DropSchema": + def drop_schema(self, schema_name: str, dialect: DialectType = None) -> "DropSchema": """Create a DROP SCHEMA builder. Args: + schema_name: Name of the schema to drop dialect: Optional SQL dialect Returns: DropSchema builder instance """ - return DropSchema(dialect=dialect or self.dialect) + return DropSchema(schema_name, dialect=dialect or self.dialect) def alter_table(self, table_name: str, dialect: DialectType = None) -> "AlterTable": """Create an ALTER TABLE builder. @@ -409,22 +404,19 @@ def alter_table(self, table_name: str, dialect: DialectType = None) -> "AlterTab Returns: AlterTable builder instance """ - builder = AlterTable(table_name) - builder.dialect = dialect or self.dialect - return builder + return AlterTable(table_name, dialect=dialect or self.dialect) - def rename_table(self, dialect: DialectType = None) -> "RenameTable": + def rename_table(self, old_name: str, dialect: DialectType = None) -> "RenameTable": """Create a RENAME TABLE builder. Args: + old_name: Current name of the table dialect: Optional SQL dialect Returns: RenameTable builder instance """ - builder = RenameTable() - builder.dialect = dialect or self.dialect - return builder + return RenameTable(old_name, dialect=dialect or self.dialect) def comment_on(self, dialect: DialectType = None) -> "CommentOn": """Create a COMMENT ON builder. 
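Reviewer note: a minimal, hedged usage sketch of the revised factory API above, where DDL builders now take the object name at construction instead of setting it afterwards. Table/view names are invented, and it assumes `.column()` forwards the keyword options defined on `ColumnDefinition` (as the Oracle migration tracker later in this diff does):

```python
from sqlspec._sql import sql

# Object names are now passed up front (names here are illustrative only).
table = (
    sql.create_table("app_user")
    .column("id", "INTEGER", primary_key=True)
    .column("email", "VARCHAR(255)", not_null=True)
)
view = sql.create_view("active_app_user").as_select(
    "SELECT id, email FROM app_user WHERE active = TRUE"
)

print(table.build().sql)  # e.g. CREATE TABLE app_user (id INTEGER PRIMARY KEY, ...)
print(view.build().sql)   # e.g. CREATE VIEW active_app_user AS SELECT ...
```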
@@ -435,9 +427,7 @@ def comment_on(self, dialect: DialectType = None) -> "CommentOn": Returns: CommentOn builder instance """ - builder = CommentOn() - builder.dialect = dialect or self.dialect - return builder + return CommentOn(dialect=dialect or self.dialect) # =================== # SQL Analysis Helpers @@ -746,7 +736,6 @@ def raw(sql_fragment: str, **parameters: Any) -> "Union[exp.Expression, SQL]": raise SQLBuilderError(msg) from e # New behavior - return SQL statement with parameters - from sqlspec.core.statement import SQL return SQL(sql_fragment, parameters) @@ -1331,9 +1320,7 @@ def truncate(self, table_name: str) -> "Truncate": ) ``` """ - builder = Truncate(dialect=self.dialect) - builder._table_name = table_name - return builder + return Truncate(table_name, dialect=self.dialect) # =================== # Case Expressions diff --git a/sqlspec/adapters/adbc/driver.py b/sqlspec/adapters/adbc/driver.py index 315001bd7..f28c1e0e9 100644 --- a/sqlspec/adapters/adbc/driver.py +++ b/sqlspec/adapters/adbc/driver.py @@ -48,7 +48,7 @@ "postgres": (ParameterStyle.NUMERIC, [ParameterStyle.NUMERIC]), "postgresql": (ParameterStyle.NUMERIC, [ParameterStyle.NUMERIC]), "bigquery": (ParameterStyle.NAMED_AT, [ParameterStyle.NAMED_AT]), - "sqlite": (ParameterStyle.QMARK, [ParameterStyle.QMARK, ParameterStyle.NAMED_COLON]), + "sqlite": (ParameterStyle.QMARK, [ParameterStyle.QMARK]), "duckdb": (ParameterStyle.QMARK, [ParameterStyle.QMARK, ParameterStyle.NUMERIC, ParameterStyle.NAMED_DOLLAR]), "mysql": (ParameterStyle.POSITIONAL_PYFORMAT, [ParameterStyle.POSITIONAL_PYFORMAT, ParameterStyle.NAMED_PYFORMAT]), "snowflake": (ParameterStyle.QMARK, [ParameterStyle.QMARK, ParameterStyle.NUMERIC]), diff --git a/sqlspec/adapters/aiosqlite/pool.py b/sqlspec/adapters/aiosqlite/pool.py index 286c4049c..63056ab44 100644 --- a/sqlspec/adapters/aiosqlite/pool.py +++ b/sqlspec/adapters/aiosqlite/pool.py @@ -123,15 +123,15 @@ class AiosqliteConnectionPool: """Multi-connection pool for aiosqlite with proper shutdown handling.""" __slots__ = ( - "_closed_event", + "_closed_event_instance", "_connect_timeout", "_connection_parameters", "_connection_registry", "_idle_timeout", - "_lock", + "_lock_instance", "_operation_timeout", "_pool_size", - "_queue", + "_queue_instance", "_tracked_threads", "_wal_initialized", ) @@ -159,13 +159,36 @@ def __init__( self._idle_timeout = idle_timeout self._operation_timeout = operation_timeout - self._queue: asyncio.Queue[AiosqlitePoolConnection] = asyncio.Queue(maxsize=pool_size) self._connection_registry: dict[str, AiosqlitePoolConnection] = {} - self._lock = asyncio.Lock() - self._closed_event = asyncio.Event() self._tracked_threads: set[Union[threading.Thread, AiosqliteConnection]] = set() self._wal_initialized = False + # Lazy initialization for Python 3.9 compatibility (asyncio objects can't be created without event loop) + self._queue_instance: Optional[asyncio.Queue[AiosqlitePoolConnection]] = None + self._lock_instance: Optional[asyncio.Lock] = None + self._closed_event_instance: Optional[asyncio.Event] = None + + @property + def _queue(self) -> "asyncio.Queue[AiosqlitePoolConnection]": + """Lazy initialization of asyncio.Queue for Python 3.9 compatibility.""" + if self._queue_instance is None: + self._queue_instance = asyncio.Queue(maxsize=self._pool_size) + return self._queue_instance + + @property + def _lock(self) -> asyncio.Lock: + """Lazy initialization of asyncio.Lock for Python 3.9 compatibility.""" + if self._lock_instance is None: + self._lock_instance = asyncio.Lock() 
+ return self._lock_instance + + @property + def _closed_event(self) -> asyncio.Event: + """Lazy initialization of asyncio.Event for Python 3.9 compatibility.""" + if self._closed_event_instance is None: + self._closed_event_instance = asyncio.Event() + return self._closed_event_instance + @property def is_closed(self) -> bool: """Check if pool is closed. @@ -173,7 +196,7 @@ def is_closed(self) -> bool: Returns: True if pool is closed """ - return self._closed_event.is_set() + return self._closed_event_instance is not None and self._closed_event.is_set() def size(self) -> int: """Get total number of connections in pool. @@ -189,6 +212,8 @@ def checked_out(self) -> int: Returns: Number of connections currently in use """ + if self._queue_instance is None: + return len(self._connection_registry) return len(self._connection_registry) - self._queue.qsize() def _track_aiosqlite_thread(self, connection: "AiosqliteConnection") -> None: diff --git a/sqlspec/adapters/asyncmy/config.py b/sqlspec/adapters/asyncmy/config.py index c1c1c8709..51ccc6479 100644 --- a/sqlspec/adapters/asyncmy/config.py +++ b/sqlspec/adapters/asyncmy/config.py @@ -107,7 +107,11 @@ async def _create_pool(self) -> "Pool": # pyright: ignore async def _close_pool(self) -> None: """Close the actual async connection pool.""" if self.pool_instance: - await self.pool_instance.close() + self.pool_instance.close() + + async def close_pool(self) -> None: + """Close the connection pool.""" + await self._close_pool() async def create_connection(self) -> AsyncmyConnection: # pyright: ignore """Create a single async connection (not from pool). diff --git a/sqlspec/adapters/asyncpg/config.py b/sqlspec/adapters/asyncpg/config.py index db5e7c629..b4a0748d4 100644 --- a/sqlspec/adapters/asyncpg/config.py +++ b/sqlspec/adapters/asyncpg/config.py @@ -144,6 +144,10 @@ async def _close_pool(self) -> None: if self.pool_instance: await self.pool_instance.close() + async def close_pool(self) -> None: + """Close the connection pool.""" + await self._close_pool() + async def create_connection(self) -> "AsyncpgConnection": """Create a single async connection from the pool. diff --git a/sqlspec/adapters/duckdb/driver.py b/sqlspec/adapters/duckdb/driver.py index 06b68bdf6..61c5a0265 100644 --- a/sqlspec/adapters/duckdb/driver.py +++ b/sqlspec/adapters/duckdb/driver.py @@ -45,11 +45,7 @@ default_parameter_style=ParameterStyle.QMARK, supported_parameter_styles={ParameterStyle.QMARK, ParameterStyle.NUMERIC, ParameterStyle.NAMED_DOLLAR}, default_execution_parameter_style=ParameterStyle.QMARK, - supported_execution_parameter_styles={ - ParameterStyle.QMARK, - ParameterStyle.NUMERIC, - ParameterStyle.NAMED_DOLLAR, - }, + supported_execution_parameter_styles={ParameterStyle.QMARK, ParameterStyle.NUMERIC}, type_coercion_map={}, has_native_list_expansion=True, needs_static_script_compilation=False, diff --git a/sqlspec/adapters/oracledb/config.py b/sqlspec/adapters/oracledb/config.py index 397d5b9f7..b27d589b2 100644 --- a/sqlspec/adapters/oracledb/config.py +++ b/sqlspec/adapters/oracledb/config.py @@ -239,6 +239,10 @@ async def _close_pool(self) -> None: if self.pool_instance: await self.pool_instance.close() + async def close_pool(self) -> None: + """Close the connection pool.""" + await self._close_pool() + async def create_connection(self) -> OracleAsyncConnection: """Create a single async connection (not from pool). 
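Reviewer note: the aiosqlite pool changes above replace eagerly created asyncio primitives with lazily built ones. A simplified, self-contained sketch of that pattern (illustrative class name, not the pool itself) showing why construction stays safe on Python 3.9, where asyncio objects bind to the loop that exists when they are created:

```python
import asyncio
from typing import Optional


class LazyPrimitives:
    """Illustrative only: defer asyncio object creation until first use."""

    def __init__(self) -> None:
        # Safe to run with no event loop; nothing loop-bound is created yet.
        self._lock_instance: Optional[asyncio.Lock] = None

    @property
    def _lock(self) -> asyncio.Lock:
        # The lock materializes lazily, inside a running loop.
        if self._lock_instance is None:
            self._lock_instance = asyncio.Lock()
        return self._lock_instance


async def main() -> None:
    holder = LazyPrimitives()  # constructed outside any coroutine would also be fine
    async with holder._lock:
        pass


asyncio.run(main())
```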
diff --git a/sqlspec/adapters/oracledb/migrations.py b/sqlspec/adapters/oracledb/migrations.py new file mode 100644 index 000000000..9daf5588b --- /dev/null +++ b/sqlspec/adapters/oracledb/migrations.py @@ -0,0 +1,274 @@ +"""Oracle-specific migration implementations. + +This module provides Oracle Database-specific overrides for migration functionality +to handle Oracle's unique SQL syntax requirements. +""" + +import getpass +from typing import TYPE_CHECKING, Any, Optional, cast + +from sqlspec._sql import sql +from sqlspec.builder._ddl import CreateTable +from sqlspec.migrations.base import BaseMigrationTracker +from sqlspec.utils.logging import get_logger + +if TYPE_CHECKING: + from sqlspec.driver import AsyncDriverAdapterBase, SyncDriverAdapterBase + +__all__ = ("OracleAsyncMigrationTracker", "OracleSyncMigrationTracker") + +logger = get_logger("migrations.oracle") + + +class OracleMigrationTrackerMixin: + """Mixin providing Oracle-specific migration table creation.""" + + version_table: str # This will be set by the base class + + def _get_create_table_sql(self) -> CreateTable: + """Get Oracle-specific SQL builder for creating the tracking table. + + Oracle doesn't support: + - CREATE TABLE IF NOT EXISTS (need try/catch logic) + - TEXT type (use VARCHAR2) + - DEFAULT before NOT NULL is required + + Returns: + SQL builder object for Oracle table creation. + """ + return ( + sql.create_table(self.version_table) + .column("version_num", "VARCHAR2(32)", primary_key=True) + .column("description", "VARCHAR2(2000)") + .column("applied_at", "TIMESTAMP", default="CURRENT_TIMESTAMP") + .column("execution_time_ms", "INTEGER") + .column("checksum", "VARCHAR2(64)") + .column("applied_by", "VARCHAR2(255)") + ) + + +class OracleSyncMigrationTracker(OracleMigrationTrackerMixin, BaseMigrationTracker["SyncDriverAdapterBase"]): + """Oracle-specific sync migration tracker.""" + + def ensure_tracking_table(self, driver: "SyncDriverAdapterBase") -> None: + """Create the migration tracking table if it doesn't exist. + + Oracle doesn't support IF NOT EXISTS, so we check for table existence first. + + Args: + driver: The database driver to use. + """ + # Check if table already exists using Oracle's system views + check_sql = ( + sql.select(sql.count().as_("table_count")) + .from_("user_tables") + .where(sql.column("table_name") == self.version_table.upper()) + ) + result = driver.execute(check_sql) + + if result.data[0]["TABLE_COUNT"] == 0: + # Table doesn't exist, create it + driver.execute(self._get_create_table_sql()) + self._safe_commit(driver) + + def get_current_version(self, driver: "SyncDriverAdapterBase") -> "Optional[str]": + """Get the latest applied migration version. + + Args: + driver: The database driver to use. + + Returns: + The current migration version or None if no migrations applied. + """ + result = driver.execute(self._get_current_version_sql()) + return result.data[0]["VERSION_NUM"] if result.data else None + + def get_applied_migrations(self, driver: "SyncDriverAdapterBase") -> "list[dict[str, Any]]": + """Get all applied migrations in order. + + Args: + driver: The database driver to use. + + Returns: + List of migration records as dictionaries. 
+ """ + result = driver.execute(self._get_applied_migrations_sql()) + if not result.data: + return [] + + # Convert Oracle's uppercase column names to lowercase for consistency + normalized_data = [] + for row in result.data: + normalized_row = {key.lower(): value for key, value in row.items()} + normalized_data.append(normalized_row) + + return cast("list[dict[str, Any]]", normalized_data) + + def record_migration( + self, driver: "SyncDriverAdapterBase", version: str, description: str, execution_time_ms: int, checksum: str + ) -> None: + """Record a successfully applied migration. + + Args: + driver: The database driver to use. + version: Version number of the migration. + description: Description of the migration. + execution_time_ms: Execution time in milliseconds. + checksum: MD5 checksum of the migration content. + """ + # Get current user for applied_by field + + applied_by = getpass.getuser() + + record_sql = self._get_record_migration_sql(version, description, execution_time_ms, checksum, applied_by) + driver.execute(record_sql) + self._safe_commit(driver) + + def remove_migration(self, driver: "SyncDriverAdapterBase", version: str) -> None: + """Remove a migration record. + + Args: + driver: The database driver to use. + version: Version number to remove. + """ + remove_sql = self._get_remove_migration_sql(version) + driver.execute(remove_sql) + self._safe_commit(driver) + + def _safe_commit(self, driver: "SyncDriverAdapterBase") -> None: + """Safely commit a transaction only if autocommit is disabled. + + Args: + driver: The database driver to use. + """ + try: + # Check if the connection has autocommit enabled + connection = getattr(driver, "connection", None) + if connection and hasattr(connection, "autocommit") and getattr(connection, "autocommit", False): + return + + # For ADBC and other drivers, check the driver_features + driver_features = getattr(driver, "driver_features", {}) + if driver_features and driver_features.get("autocommit", False): + return + + # Safe to commit manually + driver.commit() + except Exception: + # If commit fails due to no active transaction, that's acceptable + # Some drivers with autocommit will fail when trying to commit + logger.debug("Failed to commit transaction, likely due to autocommit being enabled") + + +class OracleAsyncMigrationTracker(OracleMigrationTrackerMixin, BaseMigrationTracker["AsyncDriverAdapterBase"]): + """Oracle-specific async migration tracker.""" + + async def ensure_tracking_table(self, driver: "AsyncDriverAdapterBase") -> None: + """Create the migration tracking table if it doesn't exist. + + Oracle doesn't support IF NOT EXISTS, so we check for table existence first. + + Args: + driver: The database driver to use. + """ + # Check if table already exists using Oracle's system views + check_sql = ( + sql.select(sql.count().as_("table_count")) + .from_("user_tables") + .where(sql.column("table_name") == self.version_table.upper()) + ) + result = await driver.execute(check_sql) + + if result.data[0]["TABLE_COUNT"] == 0: + # Table doesn't exist, create it + await driver.execute(self._get_create_table_sql()) + await self._safe_commit_async(driver) + + async def get_current_version(self, driver: "AsyncDriverAdapterBase") -> "Optional[str]": + """Get the latest applied migration version. + + Args: + driver: The database driver to use. + + Returns: + The current migration version or None if no migrations applied. 
+ """ + result = await driver.execute(self._get_current_version_sql()) + return result.data[0]["VERSION_NUM"] if result.data else None + + async def get_applied_migrations(self, driver: "AsyncDriverAdapterBase") -> "list[dict[str, Any]]": + """Get all applied migrations in order. + + Args: + driver: The database driver to use. + + Returns: + List of migration records as dictionaries. + """ + result = await driver.execute(self._get_applied_migrations_sql()) + if not result.data: + return [] + + # Convert Oracle's uppercase column names to lowercase for consistency + normalized_data = [] + for row in result.data: + normalized_row = {key.lower(): value for key, value in row.items()} + normalized_data.append(normalized_row) + + return cast("list[dict[str, Any]]", normalized_data) + + async def record_migration( + self, driver: "AsyncDriverAdapterBase", version: str, description: str, execution_time_ms: int, checksum: str + ) -> None: + """Record a successfully applied migration. + + Args: + driver: The database driver to use. + version: Version number of the migration. + description: Description of the migration. + execution_time_ms: Execution time in milliseconds. + checksum: MD5 checksum of the migration content. + """ + # Get current user for applied_by field + import getpass + + applied_by = getpass.getuser() + + record_sql = self._get_record_migration_sql(version, description, execution_time_ms, checksum, applied_by) + await driver.execute(record_sql) + await self._safe_commit_async(driver) + + async def remove_migration(self, driver: "AsyncDriverAdapterBase", version: str) -> None: + """Remove a migration record. + + Args: + driver: The database driver to use. + version: Version number to remove. + """ + remove_sql = self._get_remove_migration_sql(version) + await driver.execute(remove_sql) + await self._safe_commit_async(driver) + + async def _safe_commit_async(self, driver: "AsyncDriverAdapterBase") -> None: + """Safely commit a transaction only if autocommit is disabled. + + Args: + driver: The database driver to use. + """ + try: + # Check if the connection has autocommit enabled + connection = getattr(driver, "connection", None) + if connection and hasattr(connection, "autocommit") and getattr(connection, "autocommit", False): + return + + # For ADBC and other drivers, check the driver_features + driver_features = getattr(driver, "driver_features", {}) + if driver_features and driver_features.get("autocommit", False): + return + + # Safe to commit manually + await driver.commit() + except Exception: + # If commit fails due to no active transaction, that's acceptable + # Some drivers with autocommit will fail when trying to commit + logger.debug("Failed to commit transaction, likely due to autocommit being enabled") diff --git a/sqlspec/adapters/psqlpy/config.py b/sqlspec/adapters/psqlpy/config.py index d0b85ec40..e874cd4a6 100644 --- a/sqlspec/adapters/psqlpy/config.py +++ b/sqlspec/adapters/psqlpy/config.py @@ -145,6 +145,10 @@ async def _close_pool(self) -> None: logger.exception("Failed to close psqlpy connection pool", extra={"adapter": "psqlpy", "error": str(e)}) raise + async def close_pool(self) -> None: + """Close the connection pool.""" + await self._close_pool() + async def create_connection(self) -> "PsqlpyConnection": """Create a single async connection (not from pool). 
diff --git a/sqlspec/adapters/psqlpy/driver.py b/sqlspec/adapters/psqlpy/driver.py index 30959e37b..aa234097b 100644 --- a/sqlspec/adapters/psqlpy/driver.py +++ b/sqlspec/adapters/psqlpy/driver.py @@ -65,7 +65,7 @@ SPECIAL_TYPE_REGEX: Final[re.Pattern[str]] = re.compile( r"^(?:" - r"(?P<uuid>[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}|[0-9a-f]{32})|" + r"(?P<uuid>[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})|" r"(?P<ipv4>(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(?:/(?:3[0-2]|[12]?[0-9]))?)|" r"(?P<ipv6>(?:(?:[0-9a-f]{1,4}:){7}[0-9a-f]{1,4}|(?:[0-9a-f]{1,4}:){1,7}:|:(?::[0-9a-f]{1,4}){1,7}|(?:[0-9a-f]{1,4}:){1,6}:[0-9a-f]{1,4}|::(?:ffff:)?(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?))(?:/(?:12[0-8]|1[01][0-9]|[1-9]?[0-9]))?)|" r"(?P<mac>(?:[0-9a-f]{2}[:-]){5}[0-9a-f]{2}|[0-9a-f]{12})|" @@ -84,7 +84,7 @@ def _detect_postgresql_type(value: str) -> Optional[str]: """Detect PostgreSQL data type from string value using enhanced regex. The SPECIAL_TYPE_REGEX pattern matches the following PostgreSQL types: - - uuid: Standard UUID format (with dashes) or 32 hex characters (without dashes) + - uuid: Standard UUID format (with dashes only, not 32 hex characters to avoid false positives) - ipv4: IPv4 addresses with optional CIDR notation (e.g., 192.168.1.1/24) - ipv6: All IPv6 formats including compressed forms and IPv4-mapped addresses - mac: MAC addresses in colon/dash separated or continuous format diff --git a/sqlspec/builder/_base.py b/sqlspec/builder/_base.py index 161265be8..9563ed7bf 100644 --- a/sqlspec/builder/_base.py +++ b/sqlspec/builder/_base.py @@ -223,6 +223,11 @@ def _generate_builder_cache_key(self, config: "Optional[StatementConfig]" = None import hashlib dialect_name: str = self.dialect_name or "default" + + # Ensure expression is built before generating cache key + if self._expression is None: + self._expression = self._create_base_expression() + expr_sql: str = self._expression.sql() if self._expression else "None" state_parts = [ @@ -387,7 +392,7 @@ def to_statement(self, config: "Optional[StatementConfig]" = None) -> "SQL": """ cache_config = get_cache_config() if not cache_config.compiled_cache_enabled: - return self._to_statement_without_cache(config) + return self._to_statement(config) cache_key_str = self._generate_builder_cache_key(config) cache_key = CacheKey((cache_key_str,)) @@ -397,13 +402,13 @@ def to_statement(self, config: "Optional[StatementConfig]" = None) -> "SQL": if cached_sql is not None: return cast("SQL", cached_sql) - sql_statement = self._to_statement_without_cache(config) + sql_statement = self._to_statement(config) unified_cache.put(cache_key, sql_statement) return sql_statement - def _to_statement_without_cache(self, config: "Optional[StatementConfig]" = None) -> "SQL": - """Internal method to create SQL statement without caching. + def _to_statement(self, config: "Optional[StatementConfig]" = None) -> "SQL": + """Internal method to create SQL statement. Args: config: Optional SQL configuration.
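Reviewer note: to see why the uuid branch above now requires dashes, a small self-contained check (example values invented) mirroring only that branch of the pattern; a 32-character hex digest such as an MD5 checksum would have matched the old dash-less alternative and been mis-typed as a UUID:

```python
import re

# Simplified stand-in for the uuid branch of SPECIAL_TYPE_REGEX after this change.
UUID_RE = re.compile(r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$")

md5_digest = "9e107d9d372bb6826bd81d3542a419d6"        # 32 hex chars, not a UUID
real_uuid = "123e4567-e89b-12d3-a456-426614174000"

print(bool(UUID_RE.match(md5_digest)))  # False - no longer treated as uuid
print(bool(UUID_RE.match(real_uuid)))   # True
```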
@@ -427,18 +432,32 @@ def _to_statement_without_cache(self, config: "Optional[StatementConfig]" = None ) if config is None: - from sqlspec.core.statement import StatementConfig - - parameter_config = ParameterStyleConfig( - default_parameter_style=ParameterStyle.QMARK, supported_parameter_styles={ParameterStyle.QMARK} + config = StatementConfig( + parameter_config=ParameterStyleConfig( + default_parameter_style=ParameterStyle.QMARK, supported_parameter_styles={ParameterStyle.QMARK} + ), + dialect=safe_query.dialect, ) - config = StatementConfig(parameter_config=parameter_config, dialect=safe_query.dialect) + + # Re-generate SQL if config dialect differs from SafeQuery dialect + sql_string = safe_query.sql + if ( + config.dialect is not None + and config.dialect != safe_query.dialect + and self._expression is not None + and hasattr(self._expression, "sql") + ): + try: + sql_string = self._expression.sql(dialect=config.dialect, pretty=True) + except Exception: + # Fall back to original SQL if dialect-specific generation fails + sql_string = safe_query.sql if kwargs: - return SQL(safe_query.sql, statement_config=config, **kwargs) + return SQL(sql_string, statement_config=config, **kwargs) if parameters: - return SQL(safe_query.sql, *parameters, statement_config=config) - return SQL(safe_query.sql, statement_config=config) + return SQL(sql_string, *parameters, statement_config=config) + return SQL(sql_string, statement_config=config) def __str__(self) -> str: """Return the SQL string representation of the query. diff --git a/sqlspec/builder/_ddl.py b/sqlspec/builder/_ddl.py index 68749b822..1a5b5717c 100644 --- a/sqlspec/builder/_ddl.py +++ b/sqlspec/builder/_ddl.py @@ -1,6 +1,5 @@ """DDL builders for SQLSpec: DROP, CREATE INDEX, TRUNCATE, etc.""" -from dataclasses import dataclass, field from typing import TYPE_CHECKING, Any, Optional, Union from sqlglot import exp @@ -8,7 +7,6 @@ from typing_extensions import Self from sqlspec.builder._base import QueryBuilder, SafeQuery -from sqlspec.builder._ddl_utils import build_column_expression, build_constraint_expression from sqlspec.core.result import SQLResult if TYPE_CHECKING: @@ -37,16 +35,111 @@ ) -@dataclass +def build_column_expression(col: "ColumnDefinition") -> "exp.Expression": + """Build SQLGlot expression for a column definition.""" + col_def = exp.ColumnDef(this=exp.to_identifier(col.name), kind=exp.DataType.build(col.dtype)) + + constraints: list[exp.ColumnConstraint] = [] + + if col.not_null: + constraints.append(exp.ColumnConstraint(kind=exp.NotNullColumnConstraint())) + + if col.primary_key: + constraints.append(exp.ColumnConstraint(kind=exp.PrimaryKeyColumnConstraint())) + + if col.unique: + constraints.append(exp.ColumnConstraint(kind=exp.UniqueColumnConstraint())) + + if col.default is not None: + default_expr: Optional[exp.Expression] = None + if isinstance(col.default, str): + # Use SQLGlot's built-in functions for database-specific default values + default_upper = col.default.upper() + if default_upper == "CURRENT_TIMESTAMP": + default_expr = exp.CurrentTimestamp() + elif default_upper == "CURRENT_DATE": + default_expr = exp.CurrentDate() + elif default_upper == "CURRENT_TIME": + default_expr = exp.CurrentTime() + elif "(" in col.default: + default_expr = exp.maybe_parse(col.default) + else: + default_expr = exp.convert(col.default) + else: + default_expr = exp.convert(col.default) + + # Use DefaultColumnConstraint for proper default value handling + 
constraints.append(exp.ColumnConstraint(kind=exp.DefaultColumnConstraint(this=default_expr))) + + if col.check: + constraints.append(exp.ColumnConstraint(kind=exp.Check(this=exp.maybe_parse(col.check)))) + + if col.comment: + constraints.append(exp.ColumnConstraint(kind=exp.CommentColumnConstraint(this=exp.convert(col.comment)))) + + if col.generated: + constraints.append( + exp.ColumnConstraint(kind=exp.GeneratedAsIdentityColumnConstraint(this=exp.maybe_parse(col.generated))) + ) + + if col.collate: + constraints.append(exp.ColumnConstraint(kind=exp.CollateColumnConstraint(this=exp.to_identifier(col.collate)))) + + if constraints: + col_def.set("constraints", constraints) + + return col_def + + +def build_constraint_expression(constraint: "ConstraintDefinition") -> "Optional[exp.Expression]": + """Build SQLGlot expression for a table constraint.""" + if constraint.constraint_type == "PRIMARY KEY": + pk_constraint = exp.PrimaryKey(expressions=[exp.to_identifier(col) for col in constraint.columns]) + + if constraint.name: + return exp.Constraint(this=exp.to_identifier(constraint.name), expression=pk_constraint) + return pk_constraint + + if constraint.constraint_type == "FOREIGN KEY": + fk_constraint = exp.ForeignKey( + expressions=[exp.to_identifier(col) for col in constraint.columns], + reference=exp.Reference( + this=exp.to_table(constraint.references_table) if constraint.references_table else None, + expressions=[exp.to_identifier(col) for col in constraint.references_columns], + on_delete=constraint.on_delete, + on_update=constraint.on_update, + ), + ) + + if constraint.name: + return exp.Constraint(this=exp.to_identifier(constraint.name), expression=fk_constraint) + return fk_constraint + + if constraint.constraint_type == "UNIQUE": + unique_constraint = exp.UniqueKeyProperty(expressions=[exp.to_identifier(col) for col in constraint.columns]) + + if constraint.name: + return exp.Constraint(this=exp.to_identifier(constraint.name), expression=unique_constraint) + return unique_constraint + + if constraint.constraint_type == "CHECK": + check_expr = exp.Check(this=exp.maybe_parse(constraint.condition) if constraint.condition else None) + + if constraint.name: + return exp.Constraint(this=exp.to_identifier(constraint.name), expression=check_expr) + return check_expr + + return None + + class DDLBuilder(QueryBuilder): """Base class for DDL builders (CREATE, DROP, ALTER, etc).""" - dialect: DialectType = None - _expression: Optional[exp.Expression] = field(default=None, init=False, repr=False, compare=False, hash=False) + __slots__ = () - def __post_init__(self) -> None: - # Initialize parent class attributes since dataclass doesn't call super().__init__() - super().__init__(dialect=self.dialect) + def __init__(self, dialect: DialectType = None) -> None: + super().__init__(dialect=dialect) + self._expression: Optional[exp.Expression] = None def _create_base_expression(self) -> exp.Expression: msg = "Subclasses must implement _create_base_expression." 
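Reviewer note: a quick, hedged sqlglot snippet (assuming a recent sqlglot release) showing the expression shapes the inlined `build_column_expression` helper above now emits for a `CURRENT_TIMESTAMP` default; the column name is illustrative:

```python
from sqlglot import exp

# Roughly what the helper builds for a TIMESTAMP column with a default and NOT NULL.
col_def = exp.ColumnDef(
    this=exp.to_identifier("applied_at"),
    kind=exp.DataType.build("TIMESTAMP"),
    constraints=[
        exp.ColumnConstraint(kind=exp.DefaultColumnConstraint(this=exp.CurrentTimestamp())),
        exp.ColumnConstraint(kind=exp.NotNullColumnConstraint()),
    ],
)
print(col_def.sql())  # roughly: applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
```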
@@ -65,40 +158,91 @@ def to_statement(self, config: "Optional[StatementConfig]" = None) -> "SQL": return super().to_statement(config=config) -@dataclass class ColumnDefinition: """Column definition for CREATE TABLE.""" - name: str - dtype: str - default: "Optional[Any]" = None - not_null: bool = False - primary_key: bool = False - unique: bool = False - auto_increment: bool = False - comment: "Optional[str]" = None - check: "Optional[str]" = None - generated: "Optional[str]" = None - collate: "Optional[str]" = None + __slots__ = ( + "auto_increment", + "check", + "collate", + "comment", + "default", + "dtype", + "generated", + "name", + "not_null", + "primary_key", + "unique", + ) + + def __init__( + self, + name: str, + dtype: str, + default: "Optional[Any]" = None, + not_null: bool = False, + primary_key: bool = False, + unique: bool = False, + auto_increment: bool = False, + comment: "Optional[str]" = None, + check: "Optional[str]" = None, + generated: "Optional[str]" = None, + collate: "Optional[str]" = None, + ) -> None: + self.name = name + self.dtype = dtype + self.default = default + self.not_null = not_null + self.primary_key = primary_key + self.unique = unique + self.auto_increment = auto_increment + self.comment = comment + self.check = check + self.generated = generated + self.collate = collate -@dataclass class ConstraintDefinition: """Constraint definition for CREATE TABLE.""" - constraint_type: str - name: "Optional[str]" = None - columns: "list[str]" = field(default_factory=list) - references_table: "Optional[str]" = None - references_columns: "list[str]" = field(default_factory=list) - condition: "Optional[str]" = None - on_delete: "Optional[str]" = None - on_update: "Optional[str]" = None - deferrable: bool = False - initially_deferred: bool = False + __slots__ = ( + "columns", + "condition", + "constraint_type", + "deferrable", + "initially_deferred", + "name", + "on_delete", + "on_update", + "references_columns", + "references_table", + ) + + def __init__( + self, + constraint_type: str, + name: "Optional[str]" = None, + columns: "Optional[list[str]]" = None, + references_table: "Optional[str]" = None, + references_columns: "Optional[list[str]]" = None, + condition: "Optional[str]" = None, + on_delete: "Optional[str]" = None, + on_update: "Optional[str]" = None, + deferrable: bool = False, + initially_deferred: bool = False, + ) -> None: + self.constraint_type = constraint_type + self.name = name + self.columns = columns or [] + self.references_table = references_table + self.references_columns = references_columns or [] + self.condition = condition + self.on_delete = on_delete + self.on_update = on_update + self.deferrable = deferrable + self.initially_deferred = initially_deferred -@dataclass class CreateTable(DDLBuilder): """Builder for CREATE TABLE statements with columns and constraints. 
@@ -113,20 +257,31 @@ class CreateTable(DDLBuilder): sql = builder.build().sql """ - _table_name: str = field(default="", init=False) - _if_not_exists: bool = False - _temporary: bool = False - _columns: "list[ColumnDefinition]" = field(default_factory=list) - _constraints: "list[ConstraintDefinition]" = field(default_factory=list) - _table_options: "dict[str, Any]" = field(default_factory=dict) - _schema: "Optional[str]" = None - _tablespace: "Optional[str]" = None - _like_table: "Optional[str]" = None - _partition_by: "Optional[str]" = None - - def __init__(self, table_name: str) -> None: - super().__init__() + __slots__ = ( + "_columns", + "_constraints", + "_if_not_exists", + "_like_table", + "_partition_by", + "_schema", + "_table_name", + "_table_options", + "_tablespace", + "_temporary", + ) + + def __init__(self, table_name: str, dialect: DialectType = None) -> None: + super().__init__(dialect=dialect) self._table_name = table_name + self._if_not_exists = False + self._temporary = False + self._columns: list[ColumnDefinition] = [] + self._constraints: list[ConstraintDefinition] = [] + self._table_options: dict[str, Any] = {} + self._schema: Optional[str] = None + self._tablespace: Optional[str] = None + self._like_table: Optional[str] = None + self._partition_by: Optional[str] = None def in_schema(self, schema_name: str) -> "Self": """Set the schema for the table.""" @@ -320,11 +475,7 @@ def _create_base_expression(self) -> "exp.Expression": if not self._columns and not self._like_table: self._raise_sql_builder_error("Table must have at least one column or use LIKE clause") - if self._schema: - table = exp.Table(this=exp.to_identifier(self._table_name), db=exp.to_identifier(self._schema)) - else: - table = exp.to_table(self._table_name) - + # Build column definitions and constraints column_defs: list[exp.Expression] = [] for col in self._columns: col_expr = build_column_expression(col) @@ -340,6 +491,7 @@ def _create_base_expression(self) -> "exp.Expression": if constraint_expr: column_defs.append(constraint_expr) + # Build table properties props: list[exp.Property] = [] if self._table_options.get("engine"): props.append( @@ -358,7 +510,17 @@ def _create_base_expression(self) -> "exp.Expression": properties_node = exp.Properties(expressions=props) if props else None - schema_expr = exp.Schema(expressions=column_defs) if column_defs else None + # FIXED: Create Schema with table name and columns (not Table object) + # This ensures SQLGlot generates "CREATE TABLE name (...)" instead of "CREATE TABLE name AS (...)" + if self._schema: + table_identifier = exp.Table(this=exp.to_identifier(self._table_name), db=exp.to_identifier(self._schema)) + else: + table_identifier = exp.Table(this=exp.to_identifier(self._table_name)) + + schema_expr = exp.Schema( + this=table_identifier, # Table name goes here + expressions=column_defs, # Column definitions go here + ) like_expr = None if self._like_table: @@ -366,42 +528,30 @@ def _create_base_expression(self) -> "exp.Expression": return exp.Create( kind="TABLE", - this=table, + this=schema_expr, # FIXED: Use Schema as 'this', not Table exists=self._if_not_exists, temporary=self._temporary, - expression=schema_expr, properties=properties_node, like=like_expr, ) - @staticmethod - def _build_column_expression(col: "ColumnDefinition") -> "exp.Expression": - """Build SQLGlot expression for a column definition.""" - return build_column_expression(col) - - @staticmethod - def _build_constraint_expression(constraint: "ConstraintDefinition") -> 
"Optional[exp.Expression]": - """Build SQLGlot expression for a table constraint.""" - return build_constraint_expression(constraint) - -@dataclass class DropTable(DDLBuilder): """Builder for DROP TABLE [IF EXISTS] ... [CASCADE|RESTRICT].""" - _table_name: Optional[str] = None - _if_exists: bool = False - _cascade: Optional[bool] = None + __slots__ = ("_cascade", "_if_exists", "_table_name") - def __init__(self, table_name: str, **kwargs: Any) -> None: + def __init__(self, table_name: str, dialect: DialectType = None) -> None: """Initialize DROP TABLE with table name. Args: table_name: Name of the table to drop - **kwargs: Additional DDLBuilder arguments + dialect: SQL dialect to use """ - super().__init__(**kwargs) + super().__init__(dialect=dialect) self._table_name = table_name + self._if_exists = False + self._cascade: Optional[bool] = None def table(self, name: str) -> Self: self._table_name = name @@ -427,24 +577,23 @@ def _create_base_expression(self) -> exp.Expression: ) -@dataclass class DropIndex(DDLBuilder): """Builder for DROP INDEX [IF EXISTS] ... [ON table] [CASCADE|RESTRICT].""" - _index_name: Optional[str] = None - _table_name: Optional[str] = None - _if_exists: bool = False - _cascade: Optional[bool] = None + __slots__ = ("_cascade", "_if_exists", "_index_name", "_table_name") - def __init__(self, index_name: str, **kwargs: Any) -> None: + def __init__(self, index_name: str, dialect: DialectType = None) -> None: """Initialize DROP INDEX with index name. Args: index_name: Name of the index to drop - **kwargs: Additional DDLBuilder arguments + dialect: SQL dialect to use """ - super().__init__(**kwargs) + super().__init__(dialect=dialect) self._index_name = index_name + self._table_name: Optional[str] = None + self._if_exists = False + self._cascade: Optional[bool] = None def name(self, index_name: str) -> Self: self._index_name = index_name @@ -478,13 +627,22 @@ def _create_base_expression(self) -> exp.Expression: ) -@dataclass class DropView(DDLBuilder): """Builder for DROP VIEW [IF EXISTS] ... [CASCADE|RESTRICT].""" - _view_name: Optional[str] = None - _if_exists: bool = False - _cascade: Optional[bool] = None + __slots__ = ("_cascade", "_if_exists", "_view_name") + + def __init__(self, view_name: str, dialect: DialectType = None) -> None: + """Initialize DROP VIEW with view name. + + Args: + view_name: Name of the view to drop + dialect: SQL dialect to use + """ + super().__init__(dialect=dialect) + self._view_name = view_name + self._if_exists = False + self._cascade: Optional[bool] = None def name(self, view_name: str) -> Self: self._view_name = view_name @@ -510,13 +668,22 @@ def _create_base_expression(self) -> exp.Expression: ) -@dataclass class DropSchema(DDLBuilder): """Builder for DROP SCHEMA [IF EXISTS] ... [CASCADE|RESTRICT].""" - _schema_name: Optional[str] = None - _if_exists: bool = False - _cascade: Optional[bool] = None + __slots__ = ("_cascade", "_if_exists", "_schema_name") + + def __init__(self, schema_name: str, dialect: DialectType = None) -> None: + """Initialize DROP SCHEMA with schema name. 
+ + Args: + schema_name: Name of the schema to drop + dialect: SQL dialect to use + """ + super().__init__(dialect=dialect) + self._schema_name = schema_name + self._if_exists = False + self._cascade: Optional[bool] = None def name(self, schema_name: str) -> Self: self._schema_name = schema_name @@ -542,32 +709,29 @@ def _create_base_expression(self) -> exp.Expression: ) -@dataclass class CreateIndex(DDLBuilder): """Builder for CREATE [UNIQUE] INDEX [IF NOT EXISTS] ... ON ... (...). Supports columns, expressions, ordering, using, and where. """ - _index_name: Optional[str] = None - _table_name: Optional[str] = None - _columns: list[Union[str, exp.Ordered, exp.Expression]] = field(default_factory=list) - _unique: bool = False - _if_not_exists: bool = False - _using: Optional[str] = None - _where: Optional[Union[str, exp.Expression]] = None + __slots__ = ("_columns", "_if_not_exists", "_index_name", "_table_name", "_unique", "_using", "_where") - def __init__(self, index_name: str, **kwargs: Any) -> None: + def __init__(self, index_name: str, dialect: DialectType = None) -> None: """Initialize CREATE INDEX with index name. Args: index_name: Name of the index to create - **kwargs: Additional DDLBuilder arguments + dialect: SQL dialect to use """ - super().__init__(**kwargs) + super().__init__(dialect=dialect) self._index_name = index_name - if not hasattr(self, "_columns"): - self._columns = [] + self._table_name: Optional[str] = None + self._columns: list[Union[str, exp.Ordered, exp.Expression]] = [] + self._unique = False + self._if_not_exists = False + self._using: Optional[str] = None + self._where: Optional[Union[str, exp.Expression]] = None def name(self, index_name: str) -> Self: self._index_name = index_name @@ -625,13 +789,22 @@ def _create_base_expression(self) -> exp.Expression: ) -@dataclass class Truncate(DDLBuilder): """Builder for TRUNCATE TABLE ... [CASCADE|RESTRICT] [RESTART IDENTITY|CONTINUE IDENTITY].""" - _table_name: Optional[str] = None - _cascade: Optional[bool] = None - _identity: Optional[str] = None + __slots__ = ("_cascade", "_identity", "_table_name") + + def __init__(self, table_name: str, dialect: DialectType = None) -> None: + """Initialize TRUNCATE with table name. 
+ + Args: + table_name: Name of the table to truncate + dialect: SQL dialect to use + """ + super().__init__(dialect=dialect) + self._table_name = table_name + self._cascade: Optional[bool] = None + self._identity: Optional[str] = None def table(self, name: str) -> Self: self._table_name = name @@ -660,29 +833,63 @@ def _create_base_expression(self) -> exp.Expression: return exp.TruncateTable(this=exp.to_table(self._table_name), cascade=self._cascade, identity=identity_expr) -@dataclass class AlterOperation: """Represents a single ALTER TABLE operation.""" - operation_type: str - column_name: "Optional[str]" = None - column_definition: "Optional[ColumnDefinition]" = None - constraint_name: "Optional[str]" = None - constraint_definition: "Optional[ConstraintDefinition]" = None - new_type: "Optional[str]" = None - new_name: "Optional[str]" = None - after_column: "Optional[str]" = None - first: bool = False - using_expression: "Optional[str]" = None + __slots__ = ( + "after_column", + "column_definition", + "column_name", + "constraint_definition", + "constraint_name", + "first", + "new_name", + "new_type", + "operation_type", + "using_expression", + ) + + def __init__( + self, + operation_type: str, + column_name: "Optional[str]" = None, + column_definition: "Optional[ColumnDefinition]" = None, + constraint_name: "Optional[str]" = None, + constraint_definition: "Optional[ConstraintDefinition]" = None, + new_type: "Optional[str]" = None, + new_name: "Optional[str]" = None, + after_column: "Optional[str]" = None, + first: bool = False, + using_expression: "Optional[str]" = None, + ) -> None: + self.operation_type = operation_type + self.column_name = column_name + self.column_definition = column_definition + self.constraint_name = constraint_name + self.constraint_definition = constraint_definition + self.new_type = new_type + self.new_name = new_name + self.after_column = after_column + self.first = first + self.using_expression = using_expression -@dataclass class CreateSchema(DDLBuilder): """Builder for CREATE SCHEMA [IF NOT EXISTS] schema_name [AUTHORIZATION user_name].""" - _schema_name: Optional[str] = None - _if_not_exists: bool = False - _authorization: Optional[str] = None + __slots__ = ("_authorization", "_if_not_exists", "_schema_name") + + def __init__(self, schema_name: str, dialect: DialectType = None) -> None: + """Initialize CREATE SCHEMA with schema name. + + Args: + schema_name: Name of the schema to create + dialect: SQL dialect to use + """ + super().__init__(dialect=dialect) + self._schema_name = schema_name + self._if_not_exists = False + self._authorization: Optional[str] = None def name(self, schema_name: str) -> Self: self._schema_name = schema_name @@ -713,7 +920,6 @@ def _create_base_expression(self) -> exp.Expression: ) -@dataclass class CreateTableAsSelect(DDLBuilder): """Builder for CREATE TABLE [IF NOT EXISTS] ... AS SELECT ... (CTAS). @@ -736,10 +942,14 @@ class CreateTableAsSelect(DDLBuilder): - as_select(select_query): Set the SELECT source (SQL, SelectBuilder, or str). 
""" - _table_name: Optional[str] = None - _if_not_exists: bool = False - _columns: list[str] = field(default_factory=list) - _select_query: Optional[object] = None + __slots__ = ("_columns", "_if_not_exists", "_select_query", "_table_name") + + def __init__(self, dialect: DialectType = None) -> None: + super().__init__(dialect=dialect) + self._table_name: Optional[str] = None + self._if_not_exists = False + self._columns: list[str] = [] + self._select_query: Optional[object] = None def name(self, table_name: str) -> Self: self._table_name = table_name @@ -753,7 +963,7 @@ def columns(self, *cols: str) -> Self: self._columns = list(cols) return self - def as_select(self, select_query: object) -> Self: + def as_select(self, select_query: "Union[str, exp.Expression]") -> Self: self._select_query = select_query return self @@ -805,23 +1015,43 @@ def _create_base_expression(self) -> exp.Expression: ) -@dataclass class CreateMaterializedView(DDLBuilder): """Builder for CREATE MATERIALIZED VIEW [IF NOT EXISTS] ... AS SELECT ... Supports optional column list, parameterized SELECT sources, and dialect-specific options. """ - _view_name: Optional[str] = None - _if_not_exists: bool = False - _columns: list[str] = field(default_factory=list) - _select_query: Optional[object] = None - _with_data: Optional[bool] = None - _refresh_mode: Optional[str] = None - _storage_parameters: dict[str, Any] = field(default_factory=dict) - _tablespace: Optional[str] = None - _using_index: Optional[str] = None - _hints: list[str] = field(default_factory=list) + __slots__ = ( + "_columns", + "_hints", + "_if_not_exists", + "_refresh_mode", + "_select_query", + "_storage_parameters", + "_tablespace", + "_using_index", + "_view_name", + "_with_data", + ) + + def __init__(self, view_name: str, dialect: DialectType = None) -> None: + """Initialize CREATE MATERIALIZED VIEW with view name. + + Args: + view_name: Name of the materialized view to create + dialect: SQL dialect to use + """ + super().__init__(dialect=dialect) + self._view_name = view_name + self._if_not_exists = False + self._columns: list[str] = [] + self._select_query: Optional[Union[str, exp.Expression]] = None + self._with_data: Optional[bool] = None + self._refresh_mode: Optional[str] = None + self._storage_parameters: dict[str, Any] = {} + self._tablespace: Optional[str] = None + self._using_index: Optional[str] = None + self._hints: list[str] = [] def name(self, view_name: str) -> Self: self._view_name = view_name @@ -835,7 +1065,7 @@ def columns(self, *cols: str) -> Self: self._columns = list(cols) return self - def as_select(self, select_query: object) -> Self: + def as_select(self, select_query: "Union[str, exp.Expression]") -> Self: self._select_query = select_query return self @@ -926,18 +1156,27 @@ def _create_base_expression(self) -> exp.Expression: ) -@dataclass class CreateView(DDLBuilder): """Builder for CREATE VIEW [IF NOT EXISTS] ... AS SELECT ... Supports optional column list, parameterized SELECT sources, and hints. """ - _view_name: Optional[str] = None - _if_not_exists: bool = False - _columns: list[str] = field(default_factory=list) - _select_query: Optional[object] = None - _hints: list[str] = field(default_factory=list) + __slots__ = ("_columns", "_hints", "_if_not_exists", "_select_query", "_view_name") + + def __init__(self, view_name: str, dialect: DialectType = None) -> None: + """Initialize CREATE VIEW with view name. 
+ + Args: + view_name: Name of the view to create + dialect: SQL dialect to use + """ + super().__init__(dialect=dialect) + self._view_name = view_name + self._if_not_exists = False + self._columns: list[str] = [] + self._select_query: Optional[Union[str, exp.Expression]] = None + self._hints: list[str] = [] def name(self, view_name: str) -> Self: self._view_name = view_name @@ -951,7 +1190,7 @@ def columns(self, *cols: str) -> Self: self._columns = list(cols) return self - def as_select(self, select_query: object) -> Self: + def as_select(self, select_query: "Union[str, exp.Expression]") -> Self: self._select_query = select_query return self @@ -1007,7 +1246,6 @@ def _create_base_expression(self) -> exp.Expression: ) -@dataclass class AlterTable(DDLBuilder): """Builder for ALTER TABLE with granular operations. @@ -1015,23 +1253,20 @@ class AlterTable(DDLBuilder): Example: builder = ( - AlterTableBuilder("users") + AlterTable("users") .add_column("email", "VARCHAR(255)", not_null=True) .drop_column("old_field") .add_constraint("check_age", "CHECK (age >= 18)") ) """ - _table_name: str = field(default="", init=False) - _operations: "list[AlterOperation]" = field(default_factory=list) - _schema: "Optional[str]" = None - _if_exists: bool = False + __slots__ = ("_if_exists", "_operations", "_schema", "_table_name") - def __init__(self, table_name: str) -> None: - super().__init__() + def __init__(self, table_name: str, dialect: DialectType = None) -> None: + super().__init__(dialect=dialect) self._table_name = table_name - self._operations = [] - self._schema = None + self._operations: list[AlterOperation] = [] + self._schema: Optional[str] = None self._if_exists = False def if_exists(self) -> "Self": @@ -1277,17 +1512,25 @@ def _build_operation_expression(self, op: "AlterOperation") -> exp.Expression: raise AssertionError -@dataclass class CommentOn(DDLBuilder): """Builder for COMMENT ON ... IS ... statements. Supports COMMENT ON TABLE and COMMENT ON COLUMN. """ - _target_type: Optional[str] = None - _table: Optional[str] = None - _column: Optional[str] = None - _comment: Optional[str] = None + __slots__ = ("_column", "_comment", "_table", "_target_type") + + def __init__(self, dialect: DialectType = None) -> None: + """Initialize COMMENT ON builder. + + Args: + dialect: SQL dialect to use + """ + super().__init__(dialect=dialect) + self._target_type: Optional[str] = None + self._table: Optional[str] = None + self._column: Optional[str] = None + self._comment: Optional[str] = None def on_table(self, table: str) -> Self: self._target_type = "TABLE" @@ -1318,15 +1561,24 @@ def _create_base_expression(self) -> exp.Expression: raise AssertionError -@dataclass class RenameTable(DDLBuilder): """Builder for ALTER TABLE ... RENAME TO ... statements. Supports renaming a table. """ - _old_name: Optional[str] = None - _new_name: Optional[str] = None + __slots__ = ("_new_name", "_old_name") + + def __init__(self, old_name: str, dialect: DialectType = None) -> None: + """Initialize RENAME TABLE with old name. 
+ + Args: + old_name: Current name of the table + dialect: SQL dialect to use + """ + super().__init__(dialect=dialect) + self._old_name = old_name + self._new_name: Optional[str] = None def table(self, old_name: str) -> Self: self._old_name = old_name diff --git a/sqlspec/builder/_ddl_utils.py b/sqlspec/builder/_ddl_utils.py deleted file mode 100644 index e8aa961ed..000000000 --- a/sqlspec/builder/_ddl_utils.py +++ /dev/null @@ -1,103 +0,0 @@ -"""DDL builder utilities.""" - -from typing import TYPE_CHECKING, Optional - -from sqlglot import exp - -if TYPE_CHECKING: - from sqlspec.builder._ddl import ColumnDefinition, ConstraintDefinition - -__all__ = ("build_column_expression", "build_constraint_expression") - - -def build_column_expression(col: "ColumnDefinition") -> "exp.Expression": - """Build SQLGlot expression for a column definition.""" - col_def = exp.ColumnDef(this=exp.to_identifier(col.name), kind=exp.DataType.build(col.dtype)) - - constraints: list[exp.ColumnConstraint] = [] - - if col.not_null: - constraints.append(exp.ColumnConstraint(kind=exp.NotNullColumnConstraint())) - - if col.primary_key: - constraints.append(exp.ColumnConstraint(kind=exp.PrimaryKeyColumnConstraint())) - - if col.unique: - constraints.append(exp.ColumnConstraint(kind=exp.UniqueColumnConstraint())) - - if col.default is not None: - default_expr: Optional[exp.Expression] = None - if isinstance(col.default, str): - if col.default.upper() in {"CURRENT_TIMESTAMP", "CURRENT_DATE", "CURRENT_TIME"} or "(" in col.default: - default_expr = exp.maybe_parse(col.default) - else: - default_expr = exp.convert(col.default) - else: - default_expr = exp.convert(col.default) - - constraints.append(exp.ColumnConstraint(kind=default_expr)) - - if col.check: - check_expr = exp.Check(this=exp.maybe_parse(col.check)) - constraints.append(exp.ColumnConstraint(kind=check_expr)) - - if col.comment: - constraints.append(exp.ColumnConstraint(kind=exp.CommentColumnConstraint(this=exp.convert(col.comment)))) - - if col.generated: - generated_expr = exp.GeneratedAsIdentityColumnConstraint(this=exp.maybe_parse(col.generated)) - constraints.append(exp.ColumnConstraint(kind=generated_expr)) - - if col.collate: - constraints.append(exp.ColumnConstraint(kind=exp.CollateColumnConstraint(this=exp.to_identifier(col.collate)))) - - if constraints: - col_def.set("constraints", constraints) - - return col_def - - -def build_constraint_expression(constraint: "ConstraintDefinition") -> "Optional[exp.Expression]": - """Build SQLGlot expression for a table constraint.""" - if constraint.constraint_type == "PRIMARY KEY": - pk_cols = [exp.to_identifier(col) for col in constraint.columns] - pk_constraint = exp.PrimaryKey(expressions=pk_cols) - - if constraint.name: - return exp.Constraint(this=exp.to_identifier(constraint.name), expression=pk_constraint) - return pk_constraint - - if constraint.constraint_type == "FOREIGN KEY": - fk_cols = [exp.to_identifier(col) for col in constraint.columns] - ref_cols = [exp.to_identifier(col) for col in constraint.references_columns] - - fk_constraint = exp.ForeignKey( - expressions=fk_cols, - reference=exp.Reference( - this=exp.to_table(constraint.references_table) if constraint.references_table else None, - expressions=ref_cols, - on_delete=constraint.on_delete, - on_update=constraint.on_update, - ), - ) - - if constraint.name: - return exp.Constraint(this=exp.to_identifier(constraint.name), expression=fk_constraint) - return fk_constraint - - if constraint.constraint_type == "UNIQUE": - unique_cols = 
[exp.to_identifier(col) for col in constraint.columns] - unique_constraint = exp.UniqueKeyProperty(expressions=unique_cols) - - if constraint.name: - return exp.Constraint(this=exp.to_identifier(constraint.name), expression=unique_constraint) - return unique_constraint - - if constraint.constraint_type == "CHECK": - check_expr = exp.Check(this=exp.maybe_parse(constraint.condition) if constraint.condition else None) - - if constraint.name: - return exp.Constraint(this=exp.to_identifier(constraint.name), expression=check_expr) - return check_expr - - return None diff --git a/sqlspec/builder/_insert.py b/sqlspec/builder/_insert.py index 0fa8e4315..f6f0c1d9f 100644 --- a/sqlspec/builder/_insert.py +++ b/sqlspec/builder/_insert.py @@ -173,7 +173,7 @@ def values(self, *values: Any, **kwargs: Any) -> "Self": else: param_name = self._generate_unique_parameter_name(f"value_{i + 1}") _, param_name = self.add_parameter(value, name=param_name) - value_placeholders.append(exp.var(param_name)) + value_placeholders.append(exp.Placeholder(this=param_name)) tuple_expr = exp.Tuple(expressions=value_placeholders) if self._values_added_count == 0: diff --git a/sqlspec/builder/mixins/_insert_operations.py b/sqlspec/builder/mixins/_insert_operations.py index 8bf9e1786..359214497 100644 --- a/sqlspec/builder/mixins/_insert_operations.py +++ b/sqlspec/builder/mixins/_insert_operations.py @@ -75,14 +75,34 @@ def columns(self, *columns: Union[str, exp.Expression]) -> Self: if not isinstance(self._expression, exp.Insert): msg = "Cannot set columns on a non-INSERT expression." raise SQLBuilderError(msg) - column_exprs = [exp.column(col) if isinstance(col, str) else col for col in columns] - self._expression.set("columns", column_exprs) + + # Get the current table from the expression + current_this = self._expression.args.get("this") + if current_this is None: + msg = "Table must be set using .into() before setting columns." 
+ raise SQLBuilderError(msg) + + if columns: + # Create identifiers for columns + column_identifiers = [exp.to_identifier(col) if isinstance(col, str) else col for col in columns] + + # Get table name from current this + table_name = current_this.this + + # Create Schema object with table and columns + schema = exp.Schema(this=table_name, expressions=column_identifiers) + self._expression.set("this", schema) + # No columns specified - ensure we have just a Table object + elif isinstance(current_this, exp.Schema): + table_name = current_this.this + self._expression.set("this", exp.Table(this=table_name)) + try: cols = self._columns if not columns: cols.clear() else: - cols[:] = [col.name if isinstance(col, exp.Column) else str(col) for col in columns] + cols[:] = [col if isinstance(col, str) else str(col) for col in columns] except AttributeError: pass return self @@ -128,7 +148,7 @@ def values(self, *values: Any, **kwargs: Any) -> Self: column_name = column_name.split(".")[-1] param_name = self._generate_unique_parameter_name(column_name) _, param_name = self.add_parameter(val, name=param_name) - row_exprs.append(exp.var(param_name)) + row_exprs.append(exp.Placeholder(this=param_name)) elif len(values) == 1 and hasattr(values[0], "items"): mapping = values[0] try: @@ -147,7 +167,7 @@ def values(self, *values: Any, **kwargs: Any) -> Self: column_name = column_name.split(".")[-1] param_name = self._generate_unique_parameter_name(column_name) _, param_name = self.add_parameter(val, name=param_name) - row_exprs.append(exp.var(param_name)) + row_exprs.append(exp.Placeholder(this=param_name)) else: try: _columns = self._columns @@ -173,7 +193,7 @@ def values(self, *values: Any, **kwargs: Any) -> Self: except AttributeError: param_name = self._generate_unique_parameter_name(f"value_{i + 1}") _, param_name = self.add_parameter(v, name=param_name) - row_exprs.append(exp.var(param_name)) + row_exprs.append(exp.Placeholder(this=param_name)) values_expr = exp.Values(expressions=[row_exprs]) self._expression.set("expression", values_expr) diff --git a/sqlspec/builder/mixins/_merge_operations.py b/sqlspec/builder/mixins/_merge_operations.py index 190637097..627550020 100644 --- a/sqlspec/builder/mixins/_merge_operations.py +++ b/sqlspec/builder/mixins/_merge_operations.py @@ -365,7 +365,7 @@ def when_not_matched_then_insert( column_name = column_name.split(".")[-1] param_name = self._generate_unique_parameter_name(column_name) param_name = self.add_parameter(val, name=param_name)[1] - parameterized_values.append(exp.var(param_name)) + parameterized_values.append(exp.Placeholder()) insert_args["this"] = exp.Tuple(expressions=[exp.column(c) for c in columns]) insert_args["expression"] = exp.Tuple(expressions=parameterized_values) diff --git a/sqlspec/builder/mixins/_select_operations.py b/sqlspec/builder/mixins/_select_operations.py index ba9c2f0ae..b623e4a21 100644 --- a/sqlspec/builder/mixins/_select_operations.py +++ b/sqlspec/builder/mixins/_select_operations.py @@ -1,6 +1,5 @@ """SELECT clause mixins consolidated into a single module.""" -from dataclasses import dataclass from typing import TYPE_CHECKING, Any, Optional, Union, cast from mypy_extensions import trait @@ -538,13 +537,10 @@ def case_(self, alias: "Optional[str]" = None) -> "CaseBuilder": return CaseBuilder(builder, alias) -@dataclass class CaseBuilder: """Builder for CASE expressions.""" - _parent: "SelectBuilderProtocol" - _alias: Optional[str] - _case_expr: exp.Case + __slots__ = ("_alias", "_case_expr", "_parent") def __init__(self, 
parent: "SelectBuilderProtocol", alias: "Optional[str]" = None) -> None: """Initialize CaseBuilder. diff --git a/sqlspec/config.py b/sqlspec/config.py index 898c7107f..99bd3675f 100644 --- a/sqlspec/config.py +++ b/sqlspec/config.py @@ -21,6 +21,7 @@ "DatabaseConfigProtocol", "DriverT", "LifecycleConfig", + "MigrationConfig", "NoPoolAsyncConfig", "NoPoolSyncConfig", "SyncConfigT", @@ -59,6 +60,23 @@ class LifecycleConfig(TypedDict, total=False): on_error: NotRequired[list[Callable[[Exception, str, dict], None]]] +class MigrationConfig(TypedDict, total=False): + """Configuration options for SQLSpec database migrations. + + This TypedDict provides type safety and IDE completion for migration configuration. + All fields are optional with sensible defaults. + """ + + script_location: NotRequired[str] + """Path to the migrations directory. Defaults to 'migrations'.""" + + version_table_name: NotRequired[str] + """Name of the table used to track applied migrations. Defaults to 'sqlspec_migrations'.""" + + project_root: NotRequired[str] + """Path to the project root directory. Used for relative path resolution.""" + + class DatabaseConfigProtocol(ABC, Generic[ConnectionT, PoolT, DriverT]): """Protocol defining the interface for database configurations.""" @@ -73,7 +91,7 @@ class DatabaseConfigProtocol(ABC, Generic[ConnectionT, PoolT, DriverT]): supports_native_parquet_export: "ClassVar[bool]" = False statement_config: "StatementConfig" pool_instance: "Optional[PoolT]" - migration_config: "dict[str, Any]" + migration_config: "Union[dict[str, Any], MigrationConfig]" def __hash__(self) -> int: return id(self) @@ -147,13 +165,13 @@ def __init__( self, *, connection_config: Optional[dict[str, Any]] = None, - migration_config: "Optional[dict[str, Any]]" = None, + migration_config: "Optional[Union[dict[str, Any], MigrationConfig]]" = None, statement_config: "Optional[StatementConfig]" = None, driver_features: "Optional[dict[str, Any]]" = None, ) -> None: self.pool_instance = None self.connection_config = connection_config or {} - self.migration_config: dict[str, Any] = migration_config if migration_config is not None else {} + self.migration_config: Union[dict[str, Any], MigrationConfig] = migration_config or {} if statement_config is None: default_parameter_config = ParameterStyleConfig( @@ -200,13 +218,13 @@ def __init__( self, *, connection_config: "Optional[dict[str, Any]]" = None, - migration_config: "Optional[dict[str, Any]]" = None, + migration_config: "Optional[Union[dict[str, Any], MigrationConfig]]" = None, statement_config: "Optional[StatementConfig]" = None, driver_features: "Optional[dict[str, Any]]" = None, ) -> None: self.pool_instance = None self.connection_config = connection_config or {} - self.migration_config: dict[str, Any] = migration_config if migration_config is not None else {} + self.migration_config: Union[dict[str, Any], MigrationConfig] = migration_config or {} if statement_config is None: default_parameter_config = ParameterStyleConfig( @@ -254,13 +272,13 @@ def __init__( *, pool_config: "Optional[dict[str, Any]]" = None, pool_instance: "Optional[PoolT]" = None, - migration_config: "Optional[dict[str, Any]]" = None, + migration_config: "Optional[Union[dict[str, Any], MigrationConfig]]" = None, statement_config: "Optional[StatementConfig]" = None, driver_features: "Optional[dict[str, Any]]" = None, ) -> None: self.pool_instance = pool_instance self.pool_config = pool_config or {} - self.migration_config: dict[str, Any] = migration_config if migration_config is not None else 
{} + self.migration_config: Union[dict[str, Any], MigrationConfig] = migration_config or {} if statement_config is None: default_parameter_config = ParameterStyleConfig( @@ -330,13 +348,13 @@ def __init__( *, pool_config: "Optional[dict[str, Any]]" = None, pool_instance: "Optional[PoolT]" = None, - migration_config: "Optional[dict[str, Any]]" = None, + migration_config: "Optional[Union[dict[str, Any], MigrationConfig]]" = None, statement_config: "Optional[StatementConfig]" = None, driver_features: "Optional[dict[str, Any]]" = None, ) -> None: self.pool_instance = pool_instance self.pool_config = pool_config or {} - self.migration_config: dict[str, Any] = migration_config if migration_config is not None else {} + self.migration_config: Union[dict[str, Any], MigrationConfig] = migration_config or {} if statement_config is None: self.statement_config = StatementConfig( diff --git a/sqlspec/driver/_common.py b/sqlspec/driver/_common.py index 91b0080e9..894a9b540 100644 --- a/sqlspec/driver/_common.py +++ b/sqlspec/driver/_common.py @@ -489,7 +489,9 @@ def _get_compiled_sql( if cached_result is not None: return cached_result - compiled_sql, execution_parameters = statement.compile() + # Ensure the statement uses the correct dialect for this driver + prepared_statement = self.prepare_statement(statement, statement_config=statement_config) + compiled_sql, execution_parameters = prepared_statement.compile() prepared_parameters = self.prepare_driver_parameters( execution_parameters, statement_config, is_many=statement.is_many diff --git a/sqlspec/migrations/adapter_discovery.py b/sqlspec/migrations/adapter_discovery.py new file mode 100644 index 000000000..42012deac --- /dev/null +++ b/sqlspec/migrations/adapter_discovery.py @@ -0,0 +1,93 @@ +"""Adapter-specific migration discovery and loading. + +This module provides functionality to discover and load adapter-specific +migration implementations when available. +""" + +import importlib +from typing import TYPE_CHECKING, Any, cast + +from sqlspec.migrations.tracker import AsyncMigrationTracker, SyncMigrationTracker +from sqlspec.utils.logging import get_logger + +if TYPE_CHECKING: + from sqlspec.migrations.base import BaseMigrationTracker + +logger = get_logger("migrations.adapter_discovery") + +__all__ = ("discover_migration_tracker",) + + +def discover_migration_tracker(config: Any, sync: bool = True) -> "type[BaseMigrationTracker[Any]]": + """Discover and return adapter-specific migration tracker if available. + + Args: + config: The SQLSpec configuration object. + sync: Whether to discover sync (True) or async (False) tracker. + + Returns: + Adapter-specific tracker class or default tracker class. 
+ """ + # Extract adapter name from config class + config_class_name = type(config).__name__ + + # Map config class names to adapter module names + adapter_mapping = { + "SqliteConfig": "sqlite", + "DuckDBConfig": "duckdb", + "PsycopgSyncConfig": "psycopg", + "PsycopgAsyncConfig": "psycopg", + "AsyncpgConfig": "asyncpg", + "PsqlpyConfig": "psqlpy", + "AsyncmyConfig": "asyncmy", + "AiosqliteConfig": "aiosqlite", + "OracleSyncConfig": "oracledb", + "OracleAsyncConfig": "oracledb", + "ADBCConfig": "adbc", + "BigQueryConfig": "bigquery", + } + + adapter_name = adapter_mapping.get(config_class_name) + + if not adapter_name: + logger.debug("No adapter mapping found for config %s, using default tracker", config_class_name) + return SyncMigrationTracker if sync else AsyncMigrationTracker + + # Try to import adapter-specific migrations module + try: + module_path = f"sqlspec.adapters.{adapter_name}.migrations" + migrations_module = importlib.import_module(module_path) + + # Look for adapter-specific tracker classes + if sync: + tracker_class_names = [ + "OracleSyncMigrationTracker" + if adapter_name == "oracledb" + else f"{adapter_name.title()}SyncMigrationTracker", + f"{adapter_name.upper()}SyncMigrationTracker", + "SyncMigrationTracker", + ] + else: + tracker_class_names = [ + "OracleAsyncMigrationTracker" + if adapter_name == "oracledb" + else f"{adapter_name.title()}AsyncMigrationTracker", + f"{adapter_name.upper()}AsyncMigrationTracker", + "AsyncMigrationTracker", + ] + + for class_name in tracker_class_names: + if hasattr(migrations_module, class_name): + tracker_class = getattr(migrations_module, class_name) + logger.debug("Using adapter-specific tracker: %s.%s", module_path, class_name) + return cast("type[BaseMigrationTracker[Any]]", tracker_class) + + logger.debug("No suitable tracker class found in %s, using default", module_path) + + except ImportError: + logger.debug("No adapter-specific migrations module found for %s, using default tracker", adapter_name) + except Exception as e: + logger.warning("Error loading adapter-specific migrations for %s: %s", adapter_name, e) + + # Fall back to default tracker + return SyncMigrationTracker if sync else AsyncMigrationTracker diff --git a/sqlspec/migrations/base.py b/sqlspec/migrations/base.py index 4f41ecad1..8cc4a8c90 100644 --- a/sqlspec/migrations/base.py +++ b/sqlspec/migrations/base.py @@ -3,18 +3,19 @@ This module provides abstract base classes for migration components. """ +import hashlib import operator from abc import ABC, abstractmethod from pathlib import Path -from typing import Any, Generic, Optional, TypeVar +from typing import Any, Generic, Optional, TypeVar, cast from sqlspec._sql import sql +from sqlspec.builder import Delete, Insert, Select from sqlspec.builder._ddl import CreateTable -from sqlspec.core.statement import SQL from sqlspec.loader import SQLFileLoader from sqlspec.migrations.loaders import get_migration_loader from sqlspec.utils.logging import get_logger -from sqlspec.utils.sync_tools import run_ +from sqlspec.utils.sync_tools import await_ __all__ = ("BaseMigrationCommands", "BaseMigrationRunner", "BaseMigrationTracker") @@ -36,54 +37,43 @@ def __init__(self, version_table_name: str = "ddl_migrations") -> None: """ self.version_table = version_table_name - def _get_create_table_sql(self) -> SQL: - """Get SQL for creating the tracking table. + def _get_create_table_sql(self) -> CreateTable: + """Get SQL builder for creating the tracking table. Returns: - SQL object for table creation. 
+ SQL builder object for table creation. """ - builder = CreateTable(self.version_table) - if not hasattr(builder, "_columns"): - builder._columns = [] - if not hasattr(builder, "_constraints"): - builder._constraints = [] - if not hasattr(builder, "_table_options"): - builder._table_options = {} - return ( - builder.if_not_exists() + sql.create_table(self.version_table) + .if_not_exists() .column("version_num", "VARCHAR(32)", primary_key=True) .column("description", "TEXT") - .column("applied_at", "TIMESTAMP", not_null=True, default="CURRENT_TIMESTAMP") + .column("applied_at", "TIMESTAMP", default="CURRENT_TIMESTAMP", not_null=True) .column("execution_time_ms", "INTEGER") .column("checksum", "VARCHAR(64)") .column("applied_by", "VARCHAR(255)") - ).to_statement() + ) - def _get_current_version_sql(self) -> SQL: - """Get SQL for retrieving current version. + def _get_current_version_sql(self) -> Select: + """Get SQL builder for retrieving current version. Returns: - SQL object for version query. + SQL builder object for version query. """ + return sql.select("version_num").from_(self.version_table).order_by("version_num DESC").limit(1) - return ( - sql.select("version_num").from_(self.version_table).order_by("version_num DESC").limit(1) - ).to_statement() - - def _get_applied_migrations_sql(self) -> SQL: - """Get SQL for retrieving all applied migrations. + def _get_applied_migrations_sql(self) -> Select: + """Get SQL builder for retrieving all applied migrations. Returns: - SQL object for migrations query. + SQL builder object for migrations query. """ - - return (sql.select("*").from_(self.version_table).order_by("version_num")).to_statement() + return sql.select("*").from_(self.version_table).order_by("version_num") def _get_record_migration_sql( self, version: str, description: str, execution_time_ms: int, checksum: str, applied_by: str - ) -> SQL: - """Get SQL for recording a migration. + ) -> Insert: + """Get SQL builder for recording a migration. Args: version: Version number of the migration. @@ -93,26 +83,24 @@ def _get_record_migration_sql( applied_by: User who applied the migration. Returns: - SQL object for insert. + SQL builder object for insert. """ - return ( sql.insert(self.version_table) .columns("version_num", "description", "execution_time_ms", "checksum", "applied_by") .values(version, description, execution_time_ms, checksum, applied_by) - ).to_statement() + ) - def _get_remove_migration_sql(self, version: str) -> SQL: - """Get SQL for removing a migration record. + def _get_remove_migration_sql(self, version: str) -> Delete: + """Get SQL builder for removing a migration record. Args: version: Version number to remove. Returns: - SQL object for delete. + SQL builder object for delete. """ - - return (sql.delete().from_(self.version_table).where(sql.version_num == version)).to_statement() + return sql.delete().from_(self.version_table).where(sql.version_num == version) @abstractmethod def ensure_tracking_table(self, driver: DriverT) -> Any: @@ -176,7 +164,6 @@ def _calculate_checksum(self, content: str) -> str: Returns: MD5 checksum hex string. 
""" - import hashlib return hashlib.md5(content.encode()).hexdigest() # noqa: S324 @@ -226,7 +213,7 @@ def _load_migration_metadata(self, file_path: Path) -> "dict[str, Any]": has_upgrade, has_downgrade = self.loader.has_query(up_query), self.loader.has_query(down_query) else: try: - has_downgrade = bool(run_(loader.get_down_sql)(file_path)) + has_downgrade = bool(await_(loader.get_down_sql, raise_sync_error=False)(file_path)) except Exception: has_downgrade = False @@ -240,7 +227,7 @@ def _load_migration_metadata(self, file_path: Path) -> "dict[str, Any]": "loader": loader, } - def _get_migration_sql(self, migration: "dict[str, Any]", direction: str) -> Optional[SQL]: + def _get_migration_sql(self, migration: "dict[str, Any]", direction: str) -> "Optional[list[str]]": """Get migration SQL for given direction. Args: @@ -261,7 +248,7 @@ def _get_migration_sql(self, migration: "dict[str, Any]", direction: str) -> Opt try: method = loader.get_up_sql if direction == "up" else loader.get_down_sql - sql_statements = run_(method)(file_path) + sql_statements = await_(method, raise_sync_error=False)(file_path) except Exception as e: if direction == "down": @@ -271,7 +258,7 @@ def _get_migration_sql(self, migration: "dict[str, Any]", direction: str) -> Opt raise ValueError(msg) from e else: if sql_statements: - return SQL(sql_statements[0]) + return cast("list[str]", sql_statements) return None @abstractmethod @@ -312,7 +299,7 @@ def __init__(self, config: ConfigT) -> None: self.config = config migration_config = getattr(self.config, "migration_config", {}) or {} - self.version_table = migration_config.get("version_table_name", "sqlspec_migrations") + self.version_table = migration_config.get("version_table_name", "ddl_migrations") self.migrations_path = Path(migration_config.get("script_location", "migrations")) self.project_root = Path(migration_config["project_root"]) if "project_root" in migration_config else None diff --git a/sqlspec/migrations/commands.py b/sqlspec/migrations/commands.py index 704d84d37..02b294924 100644 --- a/sqlspec/migrations/commands.py +++ b/sqlspec/migrations/commands.py @@ -3,15 +3,15 @@ This module provides the main command interface for database migrations. """ -from typing import TYPE_CHECKING, Any, Union, cast +from typing import TYPE_CHECKING, Any, Optional, Union, cast from rich.console import Console from rich.table import Table from sqlspec._sql import sql +from sqlspec.migrations.adapter_discovery import discover_migration_tracker from sqlspec.migrations.base import BaseMigrationCommands from sqlspec.migrations.runner import AsyncMigrationRunner, SyncMigrationRunner -from sqlspec.migrations.tracker import AsyncMigrationTracker, SyncMigrationTracker from sqlspec.migrations.utils import create_migration_file from sqlspec.utils.logging import get_logger from sqlspec.utils.sync_tools import await_ @@ -35,7 +35,8 @@ def __init__(self, config: "SyncConfigT") -> None: config: The SQLSpec configuration. 
""" super().__init__(config) - self.tracker = SyncMigrationTracker(self.version_table) + tracker_class = discover_migration_tracker(config, sync=True) + self.tracker = tracker_class(self.version_table) self.runner = SyncMigrationRunner(self.migrations_path) def init(self, directory: str, package: bool = True) -> None: @@ -47,11 +48,14 @@ def init(self, directory: str, package: bool = True) -> None: """ self.init_directory(directory, package) - def current(self, verbose: bool = False) -> None: + def current(self, verbose: bool = False) -> "Optional[str]": """Show current migration version. Args: verbose: Whether to show detailed migration history. + + Returns: + The current migration version or None if no migrations applied. """ with self.config.provide_session() as driver: self.tracker.ensure_tracking_table(driver) @@ -59,7 +63,7 @@ def current(self, verbose: bool = False) -> None: current = self.tracker.get_current_version(driver) if not current: console.print("[yellow]No migrations applied yet[/]") - return + return None console.print(f"[green]Current version:[/] {current}") @@ -84,6 +88,8 @@ def current(self, verbose: bool = False) -> None: console.print(table) + return cast("Optional[str]", current) + def upgrade(self, revision: str = "head") -> None: """Upgrade to a target revision. @@ -137,6 +143,9 @@ def downgrade(self, revision: str = "-1") -> None: to_revert = [] if revision == "-1": to_revert = [applied[-1]] + elif revision == "base": + # Revert all migrations to get back to base state + to_revert = list(reversed(applied)) else: for migration in reversed(applied): if migration["version_num"] > revision: @@ -204,7 +213,8 @@ def __init__(self, sqlspec_config: "AsyncConfigT") -> None: sqlspec_config: The SQLSpec configuration. """ super().__init__(sqlspec_config) - self.tracker = AsyncMigrationTracker(self.version_table) + tracker_class = discover_migration_tracker(sqlspec_config, sync=False) + self.tracker = tracker_class(self.version_table) self.runner = AsyncMigrationRunner(self.migrations_path) async def init(self, directory: str, package: bool = True) -> None: @@ -216,11 +226,14 @@ async def init(self, directory: str, package: bool = True) -> None: """ self.init_directory(directory, package) - async def current(self, verbose: bool = False) -> None: + async def current(self, verbose: bool = False) -> "Optional[str]": """Show current migration version. Args: verbose: Whether to show detailed migration history. + + Returns: + The current migration version or None if no migrations applied. """ async with self.config.provide_session() as driver: await self.tracker.ensure_tracking_table(driver) @@ -228,7 +241,7 @@ async def current(self, verbose: bool = False) -> None: current = await self.tracker.get_current_version(driver) if not current: console.print("[yellow]No migrations applied yet[/]") - return + return None console.print(f"[green]Current version:[/] {current}") if verbose: @@ -249,6 +262,8 @@ async def current(self, verbose: bool = False) -> None: ) console.print(table) + return cast("Optional[str]", current) + async def upgrade(self, revision: str = "head") -> None: """Upgrade to a target revision. 
@@ -297,6 +312,9 @@ async def downgrade(self, revision: str = "-1") -> None: to_revert = [] if revision == "-1": to_revert = [applied[-1]] + elif revision == "base": + # Revert all migrations to get back to base state + to_revert = list(reversed(applied)) else: for migration in reversed(applied): if migration["version_num"] > revision: @@ -382,20 +400,26 @@ def init(self, directory: str, package: bool = True) -> None: package: Whether to create __init__.py file. """ if self._is_async: - await_(cast("AsyncMigrationCommands[Any]", self._impl).init)(directory, package=package) + await_(cast("AsyncMigrationCommands[Any]", self._impl).init, raise_sync_error=False)( + directory, package=package + ) else: cast("SyncMigrationCommands[Any]", self._impl).init(directory, package=package) - def current(self, verbose: bool = False) -> None: + def current(self, verbose: bool = False) -> "Optional[str]": """Show current migration version. Args: verbose: Whether to show detailed migration history. + + Returns: + The current migration version or None if no migrations applied. """ if self._is_async: - await_(cast("AsyncMigrationCommands[Any]", self._impl).current, raise_sync_error=False)(verbose=verbose) - else: - cast("SyncMigrationCommands[Any]", self._impl).current(verbose=verbose) + return await_(cast("AsyncMigrationCommands[Any]", self._impl).current, raise_sync_error=False)( + verbose=verbose + ) + return cast("SyncMigrationCommands[Any]", self._impl).current(verbose=verbose) def upgrade(self, revision: str = "head") -> None: """Upgrade to a target revision. diff --git a/sqlspec/migrations/runner.py b/sqlspec/migrations/runner.py index c1a80a5de..05c3594f7 100644 --- a/sqlspec/migrations/runner.py +++ b/sqlspec/migrations/runner.py @@ -5,13 +5,13 @@ import time from pathlib import Path -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any, Optional, cast from sqlspec.core.statement import SQL from sqlspec.migrations.base import BaseMigrationRunner from sqlspec.migrations.loaders import get_migration_loader from sqlspec.utils.logging import get_logger -from sqlspec.utils.sync_tools import run_ +from sqlspec.utils.sync_tools import await_ if TYPE_CHECKING: from sqlspec.driver import AsyncDriverAdapterBase, SyncDriverAdapterBase @@ -55,12 +55,15 @@ def execute_upgrade( Returns: Tuple of (sql_content, execution_time_ms). """ - upgrade_sql = self._get_migration_sql(migration, "up") - if upgrade_sql is None: + upgrade_sql_list = self._get_migration_sql(migration, "up") + if upgrade_sql_list is None: return None, 0 start_time = time.time() - driver.execute(upgrade_sql) + # Execute each SQL statement separately + for sql_statement in upgrade_sql_list: + if sql_statement.strip(): + driver.execute_script(sql_statement) execution_time = int((time.time() - start_time) * 1000) return None, execution_time @@ -76,12 +79,15 @@ def execute_downgrade( Returns: Tuple of (sql_content, execution_time_ms). 
""" - downgrade_sql = self._get_migration_sql(migration, "down") - if downgrade_sql is None: + downgrade_sql_list = self._get_migration_sql(migration, "down") + if downgrade_sql_list is None: return None, 0 start_time = time.time() - driver.execute(downgrade_sql) + # Execute each SQL statement separately + for sql_statement in downgrade_sql_list: + if sql_statement.strip(): + driver.execute_script(sql_statement) execution_time = int((time.time() - start_time) * 1000) return None, execution_time @@ -103,8 +109,8 @@ def load_all_migrations(self) -> "dict[str, SQL]": loader = get_migration_loader(file_path, self.migrations_path, self.project_root) try: - up_sql = run_(loader.get_up_sql)(file_path) - down_sql = run_(loader.get_down_sql)(file_path) + up_sql = await_(loader.get_up_sql, raise_sync_error=False)(file_path) + down_sql = await_(loader.get_down_sql, raise_sync_error=False)(file_path) if up_sql: all_queries[f"migrate-{version}-up"] = SQL(up_sql[0]) @@ -137,7 +143,80 @@ async def load_migration(self, file_path: Path) -> "dict[str, Any]": Returns: Dictionary containing migration metadata. """ - return self._load_migration_metadata(file_path) + return await self._load_migration_metadata_async(file_path) + + async def _load_migration_metadata_async(self, file_path: Path) -> "dict[str, Any]": + """Load migration metadata from file (async version). + + Args: + file_path: Path to the migration file. + + Returns: + Migration metadata dictionary. + """ + loader = get_migration_loader(file_path, self.migrations_path, self.project_root) + loader.validate_migration_file(file_path) + content = file_path.read_text(encoding="utf-8") + checksum = self._calculate_checksum(content) + version = self._extract_version(file_path.name) + description = file_path.stem.split("_", 1)[1] if "_" in file_path.stem else "" + + has_upgrade, has_downgrade = True, False + + if file_path.suffix == ".sql": + up_query, down_query = f"migrate-{version}-up", f"migrate-{version}-down" + self.loader.clear_cache() + self.loader.load_sql(file_path) + has_upgrade, has_downgrade = self.loader.has_query(up_query), self.loader.has_query(down_query) + else: + try: + has_downgrade = bool(await loader.get_down_sql(file_path)) + except Exception: + has_downgrade = False + + return { + "version": version, + "description": description, + "file_path": file_path, + "checksum": checksum, + "has_upgrade": has_upgrade, + "has_downgrade": has_downgrade, + "loader": loader, + } + + async def _get_migration_sql_async(self, migration: "dict[str, Any]", direction: str) -> "Optional[list[str]]": + """Get migration SQL for given direction (async version). + + Args: + migration: Migration metadata. + direction: Either 'up' or 'down'. + + Returns: + SQL statements for the migration. 
+ """ + if not migration.get(f"has_{direction}grade"): + if direction == "down": + logger.warning("Migration %s has no downgrade query", migration["version"]) + return None + msg = f"Migration {migration['version']} has no upgrade query" + raise ValueError(msg) + + file_path, loader = migration["file_path"], migration["loader"] + + try: + method = loader.get_up_sql if direction == "up" else loader.get_down_sql + sql_statements = await method(file_path) + + except Exception as e: + if direction == "down": + logger.warning("Failed to load downgrade for migration %s: %s", migration["version"], e) + return None + msg = f"Failed to load upgrade for migration {migration['version']}: {e}" + raise ValueError(msg) from e + else: + if sql_statements: + return cast("list[str]", sql_statements) + return None async def execute_upgrade( self, driver: "AsyncDriverAdapterBase", migration: "dict[str, Any]" @@ -151,12 +230,15 @@ async def execute_upgrade( Returns: Tuple of (sql_content, execution_time_ms). """ - upgrade_sql = self._get_migration_sql(migration, "up") - if upgrade_sql is None: + upgrade_sql_list = await self._get_migration_sql_async(migration, "up") + if upgrade_sql_list is None: return None, 0 start_time = time.time() - await driver.execute(upgrade_sql) + # Execute each SQL statement separately + for sql_statement in upgrade_sql_list: + if sql_statement.strip(): + await driver.execute_script(sql_statement) execution_time = int((time.time() - start_time) * 1000) return None, execution_time @@ -172,12 +254,15 @@ async def execute_downgrade( Returns: Tuple of (sql_content, execution_time_ms). """ - downgrade_sql = self._get_migration_sql(migration, "down") - if downgrade_sql is None: + downgrade_sql_list = await self._get_migration_sql_async(migration, "down") + if downgrade_sql_list is None: return None, 0 start_time = time.time() - await driver.execute(downgrade_sql) + # Execute each SQL statement separately + for sql_statement in downgrade_sql_list: + if sql_statement.strip(): + await driver.execute_script(sql_statement) execution_time = int((time.time() - start_time) * 1000) return None, execution_time diff --git a/sqlspec/migrations/tracker.py b/sqlspec/migrations/tracker.py index 7218d7909..68b9fe101 100644 --- a/sqlspec/migrations/tracker.py +++ b/sqlspec/migrations/tracker.py @@ -7,12 +7,15 @@ from typing import TYPE_CHECKING, Any, Optional from sqlspec.migrations.base import BaseMigrationTracker +from sqlspec.utils.logging import get_logger if TYPE_CHECKING: from sqlspec.driver import AsyncDriverAdapterBase, SyncDriverAdapterBase __all__ = ("AsyncMigrationTracker", "SyncMigrationTracker") +logger = get_logger("migrations.tracker") + class SyncMigrationTracker(BaseMigrationTracker["SyncDriverAdapterBase"]): """Tracks applied migrations in the database.""" @@ -24,6 +27,7 @@ def ensure_tracking_table(self, driver: "SyncDriverAdapterBase") -> None: driver: The database driver to use. """ driver.execute(self._get_create_table_sql()) + self._safe_commit(driver) def get_current_version(self, driver: "SyncDriverAdapterBase") -> Optional[str]: """Get the latest applied migration version. @@ -66,6 +70,7 @@ def record_migration( version, description, execution_time_ms, checksum, os.environ.get("USER", "unknown") ) ) + self._safe_commit(driver) def remove_migration(self, driver: "SyncDriverAdapterBase", version: str) -> None: """Remove a migration record (used during downgrade). 
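The runner changes above replace single-statement execution with iterating a list of SQL strings and running each non-empty one through `execute_script()`. A Python migration written against that contract, in the same shape as the test migrations later in this diff, might look like the sketch below; the file name, table, and SQL are illustrative only.

# Illustrative migration module, e.g. migrations/0001_create_widgets.py.
# The runner calls up()/down(), receives a list of SQL strings, and executes
# each non-empty statement separately via driver.execute_script().


def up():
    """Create the widgets table and seed a row."""
    return [
        """CREATE TABLE widgets (
            id INTEGER PRIMARY KEY,
            name TEXT NOT NULL
        )""",
        "INSERT INTO widgets (name) VALUES ('first')",
    ]


def down():
    """Drop the widgets table."""
    return ["DROP TABLE IF EXISTS widgets"]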
@@ -75,6 +80,31 @@ def remove_migration(self, driver: "SyncDriverAdapterBase", version: str) -> Non version: Version number to remove. """ driver.execute(self._get_remove_migration_sql(version)) + self._safe_commit(driver) + + def _safe_commit(self, driver: "SyncDriverAdapterBase") -> None: + """Safely commit a transaction only if autocommit is disabled. + + Args: + driver: The database driver to use. + """ + try: + # Check if the connection has autocommit enabled + connection = getattr(driver, "connection", None) + if connection and hasattr(connection, "autocommit") and getattr(connection, "autocommit", False): + return + + # For ADBC and other drivers, check the driver_features + driver_features = getattr(driver, "driver_features", {}) + if driver_features and driver_features.get("autocommit", False): + return + + # Safe to commit manually + driver.commit() + except Exception: + # If commit fails due to no active transaction, that's acceptable + # Some drivers with autocommit will fail when trying to commit + logger.debug("Failed to commit transaction, likely due to autocommit being enabled") class AsyncMigrationTracker(BaseMigrationTracker["AsyncDriverAdapterBase"]): @@ -87,6 +117,7 @@ async def ensure_tracking_table(self, driver: "AsyncDriverAdapterBase") -> None: driver: The database driver to use. """ await driver.execute(self._get_create_table_sql()) + await self._safe_commit_async(driver) async def get_current_version(self, driver: "AsyncDriverAdapterBase") -> Optional[str]: """Get the latest applied migration version. @@ -129,6 +160,7 @@ async def record_migration( version, description, execution_time_ms, checksum, os.environ.get("USER", "unknown") ) ) + await self._safe_commit_async(driver) async def remove_migration(self, driver: "AsyncDriverAdapterBase", version: str) -> None: """Remove a migration record (used during downgrade). @@ -138,3 +170,28 @@ async def remove_migration(self, driver: "AsyncDriverAdapterBase", version: str) version: Version number to remove. """ await driver.execute(self._get_remove_migration_sql(version)) + await self._safe_commit_async(driver) + + async def _safe_commit_async(self, driver: "AsyncDriverAdapterBase") -> None: + """Safely commit a transaction only if autocommit is disabled. + + Args: + driver: The database driver to use. 
+ """ + try: + # Check if the connection has autocommit enabled + connection = getattr(driver, "connection", None) + if connection and hasattr(connection, "autocommit") and getattr(connection, "autocommit", False): + return + + # For ADBC and other drivers, check the driver_features + driver_features = getattr(driver, "driver_features", {}) + if driver_features and driver_features.get("autocommit", False): + return + + # Safe to commit manually + await driver.commit() + except Exception: + # If commit fails due to no active transaction, that's acceptable + # Some drivers with autocommit will fail when trying to commit + logger.debug("Failed to commit transaction, likely due to autocommit being enabled") diff --git a/sqlspec/utils/sync_tools.py b/sqlspec/utils/sync_tools.py index 05d59089d..76da734e1 100644 --- a/sqlspec/utils/sync_tools.py +++ b/sqlspec/utils/sync_tools.py @@ -26,7 +26,16 @@ class CapacityLimiter: """Limits the number of concurrent operations using a semaphore.""" def __init__(self, total_tokens: int) -> None: - self._semaphore = asyncio.Semaphore(total_tokens) + self._total_tokens = total_tokens + # Lazy initialization for Python 3.9 compatibility (asyncio.Semaphore can't be created without event loop) + self._semaphore_instance: Optional[asyncio.Semaphore] = None + + @property + def _semaphore(self) -> asyncio.Semaphore: + """Lazy initialization of asyncio.Semaphore for Python 3.9 compatibility.""" + if self._semaphore_instance is None: + self._semaphore_instance = asyncio.Semaphore(self._total_tokens) + return self._semaphore_instance async def acquire(self) -> None: await self._semaphore.acquire() @@ -36,11 +45,13 @@ def release(self) -> None: @property def total_tokens(self) -> int: - return self._semaphore._value + return self._total_tokens @total_tokens.setter def total_tokens(self, value: int) -> None: - self._semaphore = asyncio.Semaphore(value) + self._total_tokens = value + # Reset the semaphore instance so it gets recreated with new value + self._semaphore_instance = None async def __aenter__(self) -> None: await self.acquire() @@ -76,7 +87,14 @@ def wrapper(*args: "ParamSpecT.args", **kwargs: "ParamSpecT.kwargs") -> "ReturnT loop = None if loop is not None: - return asyncio.run(partial_f()) + if loop.is_running(): + import concurrent.futures + + with concurrent.futures.ThreadPoolExecutor() as executor: + future = executor.submit(asyncio.run, partial_f()) + return future.result() + else: + return asyncio.run(partial_f()) if uvloop and sys.platform != "win32": uvloop.install() # pyright: ignore[reportUnknownMemberType] return asyncio.run(partial_f()) diff --git a/tests/integration/test_adapters/test_adbc/test_migrations.py b/tests/integration/test_adapters/test_adbc/test_migrations.py new file mode 100644 index 000000000..c2bff8cae --- /dev/null +++ b/tests/integration/test_adapters/test_adbc/test_migrations.py @@ -0,0 +1,333 @@ +"""Integration tests for ADBC migration workflow.""" + +import tempfile +from pathlib import Path + +import pytest + +from sqlspec.adapters.adbc.config import AdbcConfig +from sqlspec.migrations.commands import MigrationCommands + + +@pytest.mark.xdist_group("migrations") +def test_adbc_sqlite_migration_full_workflow() -> None: + """Test full ADBC SQLite migration workflow: init -> create -> upgrade -> downgrade.""" + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + db_path = Path(temp_dir) / "test.db" + + # Create ADBC config with SQLite driver and migration directory + config = 
AdbcConfig( + connection_config={"driver_name": "adbc_driver_sqlite", "uri": f"file:{db_path}", "autocommit": True}, + migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, + ) + commands = MigrationCommands(config) + + # 1. Initialize migrations + commands.init(str(migration_dir), package=True) + + # Verify initialization + assert migration_dir.exists() + assert (migration_dir / "__init__.py").exists() + + # 2. Create a migration with simple schema + migration_content = '''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE users ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL, + email TEXT UNIQUE NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS users"] +''' + + # Write migration file + migration_file = migration_dir / "0001_create_users.py" + migration_file.write_text(migration_content) + + # 3. Apply migration (upgrade) + commands.upgrade() + + # 4. Verify migration was applied + with config.provide_session() as driver: + # Check that table exists + result = driver.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='users'") + assert len(result.data) == 1 + + # Insert test data + driver.execute("INSERT INTO users (name, email) VALUES (?, ?)", ("John Doe", "john@example.com")) + + # Verify data + users_result = driver.execute("SELECT * FROM users") + assert len(users_result.data) == 1 + assert users_result.data[0]["name"] == "John Doe" + assert users_result.data[0]["email"] == "john@example.com" + + # 5. Downgrade migration + commands.downgrade("base") + + # 6. Verify table was dropped + with config.provide_session() as driver: + result = driver.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='users'") + assert len(result.data) == 0 + + +@pytest.mark.xdist_group("migrations") +def test_adbc_postgresql_migration_workflow() -> None: + """Test ADBC PostgreSQL migration workflow with test database.""" + pytest.skip("PostgreSQL ADBC driver tests require running PostgreSQL instance") + # This test would require a PostgreSQL instance running in CI + # Implementation would be similar but with PostgreSQL-specific SQL and config + + +@pytest.mark.xdist_group("migrations") +def test_adbc_multiple_migrations_workflow() -> None: + """Test ADBC workflow with multiple migrations: create -> apply both -> downgrade one -> downgrade all.""" + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + db_path = Path(temp_dir) / "test.db" + + # Create ADBC config with SQLite driver + config = AdbcConfig( + connection_config={"driver_name": "adbc_driver_sqlite", "uri": f"file:{db_path}", "autocommit": True}, + migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, + ) + commands = MigrationCommands(config) + + # Initialize migrations + commands.init(str(migration_dir), package=True) + + # First migration - create users table + migration1_content = '''"""Create users table.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE users ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL, + email TEXT UNIQUE NOT NULL + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS users"] +''' + + # Second migration - create posts table + migration2_content = '''"""Create posts table.""" + + +def up(): + """Create posts table.""" + 
return [""" + CREATE TABLE posts ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + title TEXT NOT NULL, + content TEXT, + user_id INTEGER, + FOREIGN KEY (user_id) REFERENCES users (id) + ) + """] + + +def down(): + """Drop posts table.""" + return ["DROP TABLE IF EXISTS posts"] +''' + + # Write migration files + (migration_dir / "0001_create_users.py").write_text(migration1_content) + (migration_dir / "0002_create_posts.py").write_text(migration2_content) + + # Apply all migrations + commands.upgrade() + + # Verify both tables exist + with config.provide_session() as driver: + tables_result = driver.execute("SELECT name FROM sqlite_master WHERE type='table' ORDER BY name") + table_names = [t["name"] for t in tables_result.data] + assert "users" in table_names + assert "posts" in table_names + + # Test the relationship + driver.execute("INSERT INTO users (name, email) VALUES (?, ?)", ("Author", "author@example.com")) + driver.execute( + "INSERT INTO posts (title, content, user_id) VALUES (?, ?, ?)", ("My Post", "Post content", 1) + ) + + posts_result = driver.execute("SELECT * FROM posts") + assert len(posts_result.data) == 1 + assert posts_result.data[0]["title"] == "My Post" + + # Downgrade to revision 0001 (should drop posts table) + commands.downgrade("0001") + + with config.provide_session() as driver: + tables_result = driver.execute("SELECT name FROM sqlite_master WHERE type='table'") + table_names = [t["name"] for t in tables_result.data] + assert "users" in table_names + assert "posts" not in table_names + + # Downgrade to base (should drop all tables) + commands.downgrade("base") + + with config.provide_session() as driver: + tables_result = driver.execute( + "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'" + ) + # Should only have migration tracking table remaining + table_names = [t["name"] for t in tables_result.data if not t["name"].startswith("sqlspec_")] + assert len(table_names) == 0 + + +@pytest.mark.xdist_group("migrations") +def test_adbc_migration_current_command() -> None: + """Test the current migration command shows correct version for ADBC.""" + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + db_path = Path(temp_dir) / "test.db" + + # Create ADBC config with SQLite driver + config = AdbcConfig( + connection_config={"driver_name": "adbc_driver_sqlite", "uri": f"file:{db_path}", "autocommit": True}, + migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, + ) + commands = MigrationCommands(config) + + # Initialize migrations + commands.init(str(migration_dir), package=True) + + # Should show no current version initially + commands.current(verbose=False) # This just outputs to console + + # Create and apply a migration + migration_content = '''"""Test migration.""" + + +def up(): + """Create test table.""" + return ["CREATE TABLE test_table (id INTEGER PRIMARY KEY)"] + + +def down(): + """Drop test table.""" + return ["DROP TABLE IF EXISTS test_table"] +''' + + (migration_dir / "0001_test.py").write_text(migration_content) + + # Apply migration + commands.upgrade() + + # Check current version (this just outputs, can't assert return value) + commands.current(verbose=True) + + +@pytest.mark.xdist_group("migrations") +def test_adbc_migration_error_handling() -> None: + """Test ADBC migration error handling.""" + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + db_path = Path(temp_dir) / "test.db" + + # 
Create ADBC config with SQLite driver + config = AdbcConfig( + connection_config={"driver_name": "adbc_driver_sqlite", "uri": f"file:{db_path}", "autocommit": True}, + migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, + ) + commands = MigrationCommands(config) + + # Initialize migrations + commands.init(str(migration_dir), package=True) + + # Create a migration with syntax error + migration_content = '''"""Bad migration.""" + + +def up(): + """Invalid SQL - should cause error.""" + return ["CREATE SOME TABLE invalid_sql"] + + +def down(): + """No downgrade needed.""" + return [] +''' + + (migration_dir / "0001_bad.py").write_text(migration_content) + + # Attempting to upgrade should raise an error + with pytest.raises(Exception): # Will be wrapped in some migration exception + commands.upgrade() + + +@pytest.mark.xdist_group("migrations") +def test_adbc_migration_with_transactions() -> None: + """Test ADBC migrations work properly with transactions.""" + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + db_path = Path(temp_dir) / "test.db" + + # Create ADBC config with SQLite driver + config = AdbcConfig( + connection_config={"driver_name": "adbc_driver_sqlite", "uri": f"file:{db_path}", "autocommit": True}, + migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, + ) + commands = MigrationCommands(config) + + # Initialize migrations + commands.init(str(migration_dir), package=True) + + # Create a migration that uses transactions + migration_content = '''"""Migration with multiple operations.""" + + +def up(): + """Create customers table with data.""" + return [ + """CREATE TABLE customers ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL + )""", + "INSERT INTO customers (name) VALUES ('Customer 1')", + "INSERT INTO customers (name) VALUES ('Customer 2')" + ] + + +def down(): + """Drop customers table.""" + return ["DROP TABLE IF EXISTS customers"] +''' + + (migration_dir / "0001_transaction_test.py").write_text(migration_content) + + # Apply migration + commands.upgrade() + + # Verify both table and data exist + with config.provide_session() as driver: + customers_result = driver.execute("SELECT * FROM customers ORDER BY name") + assert len(customers_result.data) == 2 + assert customers_result.data[0]["name"] == "Customer 1" + assert customers_result.data[1]["name"] == "Customer 2" + + # Downgrade should remove everything + commands.downgrade("base") + + with config.provide_session() as driver: + result = driver.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='customers'") + assert len(result.data) == 0 diff --git a/tests/integration/test_adapters/test_aiosqlite/test_migrations.py b/tests/integration/test_adapters/test_aiosqlite/test_migrations.py new file mode 100644 index 000000000..3a7ece4a8 --- /dev/null +++ b/tests/integration/test_adapters/test_aiosqlite/test_migrations.py @@ -0,0 +1,385 @@ +"""Integration tests for AioSQLite migration workflow.""" + +import tempfile +from pathlib import Path + +import pytest + +from sqlspec.adapters.aiosqlite.config import AiosqliteConfig +from sqlspec.migrations.commands import AsyncMigrationCommands + + +@pytest.mark.xdist_group("migrations") +async def test_aiosqlite_migration_full_workflow() -> None: + """Test full AioSQLite migration workflow: init -> create -> upgrade -> downgrade.""" + # Generate unique table names for this test + test_id = 
"aiosqlite_full_workflow" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + db_path = Path(temp_dir) / "test.db" + + # Create AioSQLite config with migration directory + config = AiosqliteConfig( + pool_config={"database": str(db_path)}, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = AsyncMigrationCommands(config) + + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # Verify initialization + assert migration_dir.exists() + assert (migration_dir / "__init__.py").exists() + + # 2. Create a migration with simple schema + migration_content = f'''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL, + email TEXT UNIQUE NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS {users_table}"] +''' + + # Write migration file + migration_file = migration_dir / "0001_create_users.py" + migration_file.write_text(migration_content) + + # 3. Apply migration (upgrade) + await commands.upgrade() + + # 4. Verify migration was applied + # Note: We use the unified MigrationCommands interface which handles async/sync internally + async with config.provide_session() as driver: + # Check that table exists + result = await driver.execute(f"SELECT name FROM sqlite_master WHERE type='table' AND name='{users_table}'") + assert len(result.data) == 1 + + # Insert test data + await driver.execute( + f"INSERT INTO {users_table} (name, email) VALUES (?, ?)", ("John Doe", "john@example.com") + ) + + # Verify data + users_result = await driver.execute(f"SELECT * FROM {users_table}") + assert len(users_result.data) == 1 + assert users_result.data[0]["name"] == "John Doe" + assert users_result.data[0]["email"] == "john@example.com" + + try: + # 5. Downgrade migration + await commands.downgrade("base") + + # 6. 
Verify table was dropped + async with config.provide_session() as driver: + result = await driver.execute( + f"SELECT name FROM sqlite_master WHERE type='table' AND name='{users_table}'" + ) + assert len(result.data) == 0 + finally: + # Ensure pool is closed + if config.pool_instance: + await config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_aiosqlite_multiple_migrations_workflow() -> None: + """Test AioSQLite workflow with multiple migrations: create -> apply both -> downgrade one -> downgrade all.""" + # Generate unique table names for this test + test_id = "aiosqlite_multiple_workflow" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + posts_table = f"posts_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + db_path = Path(temp_dir) / "test.db" + + # Create AioSQLite config with migration directory + config = AiosqliteConfig( + pool_config={"database": str(db_path)}, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = AsyncMigrationCommands(config) + + # Initialize migrations + await commands.init(str(migration_dir), package=True) + + # First migration - create users table + migration1_content = f'''"""Create users table.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL, + email TEXT UNIQUE NOT NULL + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS {users_table}"] +''' + + # Second migration - create posts table + migration2_content = f'''"""Create posts table.""" + + +def up(): + """Create posts table.""" + return [""" + CREATE TABLE {posts_table} ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + title TEXT NOT NULL, + content TEXT, + user_id INTEGER, + FOREIGN KEY (user_id) REFERENCES {users_table} (id) + ) + """] + + +def down(): + """Drop posts table.""" + return ["DROP TABLE IF EXISTS {posts_table}"] +''' + + # Write migration files + (migration_dir / "0001_create_users.py").write_text(migration1_content) + (migration_dir / "0002_create_posts.py").write_text(migration2_content) + + try: + # Apply all migrations + await commands.upgrade() + + # Verify both tables exist + async with config.provide_session() as driver: + tables_result = await driver.execute("SELECT name FROM sqlite_master WHERE type='table' ORDER BY name") + table_names = [t["name"] for t in tables_result.data] + assert users_table in table_names + assert posts_table in table_names + + # Test the relationship + await driver.execute( + f"INSERT INTO {users_table} (name, email) VALUES (?, ?)", ("Author", "author@example.com") + ) + await driver.execute( + f"INSERT INTO {posts_table} (title, content, user_id) VALUES (?, ?, ?)", + ("My Post", "Post content", 1), + ) + + posts_result = await driver.execute(f"SELECT * FROM {posts_table}") + assert len(posts_result.data) == 1 + assert posts_result.data[0]["title"] == "My Post" + + # Downgrade to revision 0001 (should drop posts table) + await commands.downgrade("0001") + + async with config.provide_session() as driver: + tables_result = await driver.execute("SELECT name FROM sqlite_master WHERE type='table'") + table_names = [t["name"] for t in tables_result.data] + assert users_table in table_names + assert posts_table not in table_names + + # Downgrade to base (should drop all tables) + await commands.downgrade("base") + + async with 
config.provide_session() as driver: + tables_result = await driver.execute( + "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'" + ) + # Should only have migration tracking table remaining + table_names = [t["name"] for t in tables_result.data if not t["name"].startswith("sqlspec_")] + assert len(table_names) == 0 + finally: + # Ensure pool is closed + if config.pool_instance: + await config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_aiosqlite_migration_current_command() -> None: + """Test the current migration command shows correct version for AioSQLite.""" + # Generate unique table names for this test + test_id = "aiosqlite_current_cmd" + migration_table = f"sqlspec_migrations_{test_id}" + test_table = f"test_table_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + db_path = Path(temp_dir) / "test.db" + + # Create AioSQLite config with migration directory + config = AiosqliteConfig( + pool_config={"database": str(db_path)}, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = AsyncMigrationCommands(config) + + try: + # Initialize migrations + await commands.init(str(migration_dir), package=True) + + # Should show no current version initially + await commands.current(verbose=False) # This just outputs to console + + # Create and apply a migration + migration_content = f'''"""Test migration.""" + + +def up(): + """Create test table.""" + return ["CREATE TABLE {test_table} (id INTEGER PRIMARY KEY)"] + + +def down(): + """Drop test table.""" + return ["DROP TABLE IF EXISTS {test_table}"] +''' + + (migration_dir / "0001_test.py").write_text(migration_content) + + # Apply migration + await commands.upgrade() + + # Check current version (this just outputs, can't assert return value) + await commands.current(verbose=True) + finally: + # Ensure pool is closed + if config.pool_instance: + await config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_aiosqlite_migration_error_handling() -> None: + """Test AioSQLite migration error handling.""" + # Generate unique table names for this test + test_id = "aiosqlite_error_handling" + migration_table = f"sqlspec_migrations_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + db_path = Path(temp_dir) / "test.db" + + # Create AioSQLite config with migration directory + config = AiosqliteConfig( + pool_config={"database": str(db_path)}, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = AsyncMigrationCommands(config) + + try: + # Initialize migrations + await commands.init(str(migration_dir), package=True) + + # Create a migration with syntax error + migration_content = '''"""Bad migration.""" + + +def up(): + """Invalid SQL - should cause error.""" + return ["CREATE A TABLE invalid_sql"] + + +def down(): + """No downgrade needed.""" + return [] +''' + + (migration_dir / "0001_bad.py").write_text(migration_content) + + # Attempting to upgrade should raise an error + with pytest.raises(Exception): # Will be wrapped in some migration exception + await commands.upgrade() + finally: + # Ensure pool is closed + if config.pool_instance: + await config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_aiosqlite_migration_with_transactions() -> None: + """Test AioSQLite migrations work properly with 
transactions.""" + # Generate unique table names for this test + test_id = "aiosqlite_transactions" + migration_table = f"sqlspec_migrations_{test_id}" + customers_table = f"customers_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + db_path = Path(temp_dir) / "test.db" + + # Create AioSQLite config with migration directory + config = AiosqliteConfig( + pool_config={"database": str(db_path)}, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = AsyncMigrationCommands(config) + + try: + # Initialize migrations + await commands.init(str(migration_dir), package=True) + + # Create a migration that uses transactions + migration_content = f'''"""Migration with multiple operations.""" + + +def up(): + """Create customers table with data.""" + return [ + """CREATE TABLE {customers_table} ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL + )""", + "INSERT INTO {customers_table} (name) VALUES ('Customer 1')", + "INSERT INTO {customers_table} (name) VALUES ('Customer 2')" + ] + + +def down(): + """Drop customers table.""" + return ["DROP TABLE IF EXISTS {customers_table}"] +''' + + (migration_dir / "0001_transaction_test.py").write_text(migration_content) + + # Apply migration + await commands.upgrade() + + # Verify both table and data exist + async with config.provide_session() as driver: + customers_result = await driver.execute(f"SELECT * FROM {customers_table} ORDER BY name") + assert len(customers_result.data) == 2 + assert customers_result.data[0]["name"] == "Customer 1" + assert customers_result.data[1]["name"] == "Customer 2" + + # Downgrade should remove everything + await commands.downgrade("base") + + async with config.provide_session() as driver: + result = await driver.execute( + f"SELECT name FROM sqlite_master WHERE type='table' AND name='{customers_table}'" + ) + assert len(result.data) == 0 + finally: + # Ensure pool is closed + if config.pool_instance: + await config.close_pool() diff --git a/tests/integration/test_adapters/test_asyncmy/test_migrations.py b/tests/integration/test_adapters/test_asyncmy/test_migrations.py new file mode 100644 index 000000000..15fd2ca65 --- /dev/null +++ b/tests/integration/test_adapters/test_asyncmy/test_migrations.py @@ -0,0 +1,464 @@ +"""Integration tests for Asyncmy (MySQL) migration workflow.""" + +import tempfile +from pathlib import Path + +import pytest +from pytest_databases.docker.mysql import MySQLService + +from sqlspec.adapters.asyncmy.config import AsyncmyConfig +from sqlspec.migrations.commands import AsyncMigrationCommands + + +@pytest.mark.xdist_group("migrations") +async def test_asyncmy_migration_full_workflow(mysql_service: MySQLService) -> None: + """Test full Asyncmy migration workflow: init -> create -> upgrade -> downgrade.""" + # Generate unique table names for this test + test_id = "asyncmy_full_workflow" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + # Create Asyncmy config with migration directory + config = AsyncmyConfig( + pool_config={ + "host": mysql_service.host, + "port": mysql_service.port, + "user": mysql_service.user, + "password": mysql_service.password, + "database": mysql_service.db, + "autocommit": True, + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = 
AsyncMigrationCommands(config) + + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # Verify initialization + assert migration_dir.exists() + assert (migration_dir / "__init__.py").exists() + + # 2. Create a migration with simple schema + migration_content = f'''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id INT AUTO_INCREMENT PRIMARY KEY, + name VARCHAR(255) NOT NULL, + email VARCHAR(255) UNIQUE NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS {users_table}"] +''' + + # Write migration file + migration_file = migration_dir / "0001_create_users.py" + migration_file.write_text(migration_content) + + try: + # 3. Apply migration (upgrade) + await commands.upgrade() + + # 4. Verify migration was applied + async with config.provide_session() as driver: + # Check that table exists + result = await driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = %s AND table_name = '{users_table}'", + (mysql_service.db,), + ) + assert len(result.data) == 1 + + # Insert test data + await driver.execute( + f"INSERT INTO {users_table} (name, email) VALUES (%s, %s)", ("John Doe", "john@example.com") + ) + + # Verify data + users_result = await driver.execute(f"SELECT * FROM {users_table}") + assert len(users_result.data) == 1 + assert users_result.data[0]["name"] == "John Doe" + assert users_result.data[0]["email"] == "john@example.com" + + # 5. Downgrade migration + await commands.downgrade("base") + + # 6. Verify table was dropped + async with config.provide_session() as driver: + result = await driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = %s AND table_name = '{users_table}'", + (mysql_service.db,), + ) + assert len(result.data) == 0 + finally: + # Ensure pool is closed + if config.pool_instance: + await config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_asyncmy_multiple_migrations_workflow(mysql_service: MySQLService) -> None: + """Test Asyncmy workflow with multiple migrations: create -> apply both -> downgrade one -> downgrade all.""" + # Generate unique table names for this test + test_id = "asyncmy_multiple_workflow" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + posts_table = f"posts_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + config = AsyncmyConfig( + pool_config={ + "host": mysql_service.host, + "port": mysql_service.port, + "user": mysql_service.user, + "password": mysql_service.password, + "database": mysql_service.db, + "autocommit": True, + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = AsyncMigrationCommands(config) + + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # 2. 
Create first migration + migration1_content = f'''"""Create users table.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id INT AUTO_INCREMENT PRIMARY KEY, + name VARCHAR(255) NOT NULL, + email VARCHAR(255) UNIQUE NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS {users_table}"] +''' + (migration_dir / "0001_create_users.py").write_text(migration1_content) + + # 3. Create second migration + migration2_content = f'''"""Create posts table.""" + + +def up(): + """Create posts table.""" + return [""" + CREATE TABLE {posts_table} ( + id INT AUTO_INCREMENT PRIMARY KEY, + title VARCHAR(255) NOT NULL, + content TEXT, + user_id INT, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (user_id) REFERENCES {users_table}(id) + ) + """] + + +def down(): + """Drop posts table.""" + return ["DROP TABLE IF EXISTS {posts_table}"] +''' + (migration_dir / "0002_create_posts.py").write_text(migration2_content) + + try: + # 4. Apply all migrations + await commands.upgrade() + + # 5. Verify both tables exist + async with config.provide_session() as driver: + users_result = await driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = %s AND table_name = '{users_table}'", + (mysql_service.db,), + ) + posts_result = await driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = %s AND table_name = '{posts_table}'", + (mysql_service.db,), + ) + assert len(users_result.data) == 1 + assert len(posts_result.data) == 1 + + # Test relational integrity + await driver.execute( + f"INSERT INTO {users_table} (name, email) VALUES (%s, %s)", ("John Doe", "john@example.com") + ) + await driver.execute( + f"INSERT INTO {posts_table} (title, content, user_id) VALUES (%s, %s, %s)", + ("Test Post", "This is a test post", 1), + ) + + # 6. Downgrade to version 0001 (should remove posts table) + await commands.downgrade("0001") + + # 7. Verify only users table remains + async with config.provide_session() as driver: + users_result = await driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = %s AND table_name = '{users_table}'", + (mysql_service.db,), + ) + posts_result = await driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = %s AND table_name = '{posts_table}'", + (mysql_service.db,), + ) + assert len(users_result.data) == 1 + assert len(posts_result.data) == 0 + + # 8. Downgrade to base + await commands.downgrade("base") + + # 9. 
Verify all tables are gone + async with config.provide_session() as driver: + users_result = await driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = %s AND table_name IN ('{users_table}', '{posts_table}')", + (mysql_service.db,), + ) + assert len(users_result.data) == 0 + finally: + if config.pool_instance: + await config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_asyncmy_migration_current_command(mysql_service: MySQLService) -> None: + """Test the current migration command shows correct version for Asyncmy.""" + # Generate unique table names for this test + test_id = "asyncmy_current_cmd" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + config = AsyncmyConfig( + pool_config={ + "host": mysql_service.host, + "port": mysql_service.port, + "user": mysql_service.user, + "password": mysql_service.password, + "database": mysql_service.db, + "autocommit": True, + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = AsyncMigrationCommands(config) + + try: + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # 2. Initially no current version + current_version = await commands.current() + assert current_version is None or current_version == "base" + + # 3. Create a migration + migration_content = f'''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id INT AUTO_INCREMENT PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS {users_table}"] +''' + (migration_dir / "0001_create_users.py").write_text(migration_content) + + # 4. Apply migration + await commands.upgrade() + + # 5. Check current version is now 0001 + current_version = await commands.current() + assert current_version == "0001" + + # 6. Downgrade + await commands.downgrade("base") + + # 7. Check current version is back to base/None + current_version = await commands.current() + assert current_version is None or current_version == "base" + finally: + if config.pool_instance: + await config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_asyncmy_migration_error_handling(mysql_service: MySQLService) -> None: + """Test Asyncmy migration error handling.""" + # Generate unique table names for this test + test_id = "asyncmy_error_handling" + migration_table = f"sqlspec_migrations_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + config = AsyncmyConfig( + pool_config={ + "host": mysql_service.host, + "port": mysql_service.port, + "user": mysql_service.user, + "password": mysql_service.password, + "database": mysql_service.db, + "autocommit": True, + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = AsyncMigrationCommands(config) + + try: + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # 2. 
Create a migration with invalid SQL + migration_content = '''"""Migration with invalid SQL.""" + + +def up(): + """Create table with invalid SQL.""" + return ["CREATE INVALID SQL STATEMENT"] + + +def down(): + """Drop table.""" + return ["DROP TABLE IF EXISTS invalid_table"] +''' + (migration_dir / "0001_invalid.py").write_text(migration_content) + + # 3. Try to apply migration - should raise an error + with pytest.raises(Exception): + await commands.upgrade() + + # 4. Verify no migration was recorded due to error + async with config.provide_session() as driver: + # Check migration tracking table exists but is empty + try: + result = await driver.execute(f"SELECT COUNT(*) as count FROM {migration_table}") + assert result.data[0]["count"] == 0 + except Exception: + # If table doesn't exist, that's also acceptable + pass + finally: + if config.pool_instance: + await config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_asyncmy_migration_with_transactions(mysql_service: MySQLService) -> None: + """Test Asyncmy migrations work properly with transactions.""" + # Generate unique table names for this test + test_id = "asyncmy_transactions" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + config = AsyncmyConfig( + pool_config={ + "host": mysql_service.host, + "port": mysql_service.port, + "user": mysql_service.user, + "password": mysql_service.password, + "database": mysql_service.db, + "autocommit": False, # Disable autocommit for transaction tests + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = AsyncMigrationCommands(config) + + try: + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # 2. Create a migration + migration_content = f'''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id INT AUTO_INCREMENT PRIMARY KEY, + name VARCHAR(255) NOT NULL, + email VARCHAR(255) UNIQUE NOT NULL + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS {users_table}"] +''' + (migration_dir / "0001_create_users.py").write_text(migration_content) + + # 3. Apply migration + await commands.upgrade() + + # 4. Test transaction behavior with the session + async with config.provide_session() as driver: + # Start manual transaction + await driver.begin() + try: + # Insert data within transaction + await driver.execute( + f"INSERT INTO {users_table} (name, email) VALUES (%s, %s)", + ("Transaction User", "trans@example.com"), + ) + + # Verify data exists within transaction + result = await driver.execute(f"SELECT * FROM {users_table} WHERE name = 'Transaction User'") + assert len(result.data) == 1 + await driver.commit() + except Exception: + await driver.rollback() + raise + + # Transaction should be committed - verify data persists + result = await driver.execute(f"SELECT * FROM {users_table} WHERE name = 'Transaction User'") + assert len(result.data) == 1 + + # 5. 
Test transaction rollback + async with config.provide_session() as driver: + await driver.begin() + try: + await driver.execute( + f"INSERT INTO {users_table} (name, email) VALUES (%s, %s)", + ("Rollback User", "rollback@example.com"), + ) + # Force an error to trigger rollback + raise Exception("Intentional rollback") + except Exception: + await driver.rollback() + + # Verify rollback - data should not exist + result = await driver.execute(f"SELECT * FROM {users_table} WHERE name = 'Rollback User'") + assert len(result.data) == 0 + finally: + if config.pool_instance: + await config.close_pool() diff --git a/tests/integration/test_adapters/test_asyncpg/test_migrations.py b/tests/integration/test_adapters/test_asyncpg/test_migrations.py new file mode 100644 index 000000000..c55d16df3 --- /dev/null +++ b/tests/integration/test_adapters/test_asyncpg/test_migrations.py @@ -0,0 +1,439 @@ +"""Integration tests for AsyncPG (PostgreSQL) migration workflow.""" + +import tempfile +from pathlib import Path + +import pytest +from pytest_databases.docker.postgres import PostgresService + +from sqlspec.adapters.asyncpg.config import AsyncpgConfig +from sqlspec.migrations.commands import AsyncMigrationCommands + + +@pytest.mark.xdist_group("migrations") +async def test_asyncpg_migration_full_workflow(postgres_service: PostgresService) -> None: + """Test full AsyncPG migration workflow: init -> create -> upgrade -> downgrade.""" + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + # Create AsyncPG config with migration directory + config = AsyncpgConfig( + pool_config={ + "host": postgres_service.host, + "port": postgres_service.port, + "user": postgres_service.user, + "password": postgres_service.password, + "database": postgres_service.database, + }, + migration_config={ + "script_location": str(migration_dir), + "version_table_name": "sqlspec_migrations_asyncpg", + }, + ) + commands = AsyncMigrationCommands(config) + + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # Verify initialization + assert migration_dir.exists() + assert (migration_dir / "__init__.py").exists() + + # 2. Create a migration with simple schema + migration_content = '''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE users ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + email VARCHAR(255) UNIQUE NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS users"] +''' + + # Write migration file + migration_file = migration_dir / "0001_create_users.py" + migration_file.write_text(migration_content) + + try: + # 3. Apply migration (upgrade) + await commands.upgrade() + + # 4. Verify migration was applied + async with config.provide_session() as driver: + # Check that table exists + result = await driver.execute( + "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = 'users'" + ) + assert len(result.data) == 1 + + # Insert test data + await driver.execute( + "INSERT INTO users (name, email) VALUES ($1, $2)", ("John Doe", "john@example.com") + ) + + # Verify data + users_result = await driver.execute("SELECT * FROM users") + assert len(users_result.data) == 1 + assert users_result.data[0]["name"] == "John Doe" + assert users_result.data[0]["email"] == "john@example.com" + + # 5. Downgrade migration + await commands.downgrade("base") + + # 6. 
Verify table was dropped + async with config.provide_session() as driver: + result = await driver.execute( + "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = 'users'" + ) + assert len(result.data) == 0 + finally: + # Ensure pool is closed + if config.pool_instance: + await config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_asyncpg_multiple_migrations_workflow(postgres_service: PostgresService) -> None: + """Test AsyncPG workflow with multiple migrations: create -> apply both -> downgrade one -> downgrade all.""" + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + config = AsyncpgConfig( + pool_config={ + "host": postgres_service.host, + "port": postgres_service.port, + "user": postgres_service.user, + "password": postgres_service.password, + "database": postgres_service.database, + }, + migration_config={ + "script_location": str(migration_dir), + "version_table_name": "sqlspec_migrations_asyncpg", + }, + ) + commands = AsyncMigrationCommands(config) + + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # 2. Create first migration + migration1_content = '''"""Create users table.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE users ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + email VARCHAR(255) UNIQUE NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS users"] +''' + (migration_dir / "0001_create_users.py").write_text(migration1_content) + + # 3. Create second migration + migration2_content = '''"""Create posts table.""" + + +def up(): + """Create posts table.""" + return [""" + CREATE TABLE posts ( + id SERIAL PRIMARY KEY, + title VARCHAR(255) NOT NULL, + content TEXT, + user_id INTEGER REFERENCES users(id), + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """] + + +def down(): + """Drop posts table.""" + return ["DROP TABLE IF EXISTS posts"] +''' + (migration_dir / "0002_create_posts.py").write_text(migration2_content) + + try: + # 4. Apply all migrations + await commands.upgrade() + + # 5. Verify both tables exist + async with config.provide_session() as driver: + users_result = await driver.execute( + "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = 'users'" + ) + posts_result = await driver.execute( + "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = 'posts'" + ) + assert len(users_result.data) == 1 + assert len(posts_result.data) == 1 + + # Test relational integrity + await driver.execute( + "INSERT INTO users (name, email) VALUES ($1, $2)", ("John Doe", "john@example.com") + ) + await driver.execute( + "INSERT INTO posts (title, content, user_id) VALUES ($1, $2, $3)", + ("Test Post", "This is a test post", 1), + ) + + # 6. Downgrade to version 0001 (should remove posts table) + await commands.downgrade("0001") + + # 7. Verify only users table remains + async with config.provide_session() as driver: + users_result = await driver.execute( + "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = 'users'" + ) + posts_result = await driver.execute( + "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = 'posts'" + ) + assert len(users_result.data) == 1 + assert len(posts_result.data) == 0 + + # 8. 
Downgrade to base + await commands.downgrade("base") + + # 9. Verify all tables are gone + async with config.provide_session() as driver: + users_result = await driver.execute( + "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name IN ('users', 'posts')" + ) + assert len(users_result.data) == 0 + finally: + if config.pool_instance: + await config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_asyncpg_migration_current_command(postgres_service: PostgresService) -> None: + """Test the current migration command shows correct version for AsyncPG.""" + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + config = AsyncpgConfig( + pool_config={ + "host": postgres_service.host, + "port": postgres_service.port, + "user": postgres_service.user, + "password": postgres_service.password, + "database": postgres_service.database, + }, + migration_config={ + "script_location": str(migration_dir), + "version_table_name": "sqlspec_migrations_asyncpg", + }, + ) + commands = AsyncMigrationCommands(config) + + try: + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # 2. Initially no current version + current_version = await commands.current() + assert current_version is None or current_version == "base" + + # 3. Create a migration + migration_content = '''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE users ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS users"] +''' + (migration_dir / "0001_create_users.py").write_text(migration_content) + + # 4. Apply migration + await commands.upgrade() + + # 5. Check current version is now 0001 + current_version = await commands.current() + assert current_version == "0001" + + # 6. Downgrade + await commands.downgrade("base") + + # 7. Check current version is back to base/None + current_version = await commands.current() + assert current_version is None or current_version == "base" + finally: + if config.pool_instance: + await config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_asyncpg_migration_error_handling(postgres_service: PostgresService) -> None: + """Test AsyncPG migration error handling.""" + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + config = AsyncpgConfig( + pool_config={ + "host": postgres_service.host, + "port": postgres_service.port, + "user": postgres_service.user, + "password": postgres_service.password, + "database": postgres_service.database, + }, + migration_config={ + "script_location": str(migration_dir), + "version_table_name": "sqlspec_migrations_asyncpg", + }, + ) + commands = AsyncMigrationCommands(config) + + try: + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # 2. Create a migration with invalid SQL + migration_content = '''"""Migration with invalid SQL.""" + + +def up(): + """Create table with invalid SQL.""" + return ["CREATE INVALID SQL STATEMENT"] + + +def down(): + """Drop table.""" + return ["DROP TABLE IF EXISTS invalid_table"] +''' + (migration_dir / "0001_invalid.py").write_text(migration_content) + + # 3. Try to apply migration - should raise an error + with pytest.raises(Exception): + await commands.upgrade() + + # 4. 
Verify no migration was recorded due to error + async with config.provide_session() as driver: + # Check migration tracking table exists but is empty + try: + result = await driver.execute("SELECT COUNT(*) as count FROM sqlspec_migrations_asyncpg") + assert result.data[0]["count"] == 0 + except Exception: + # If table doesn't exist, that's also acceptable + pass + finally: + if config.pool_instance: + await config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_asyncpg_migration_with_transactions(postgres_service: PostgresService) -> None: + """Test AsyncPG migrations work properly with transactions.""" + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + config = AsyncpgConfig( + pool_config={ + "host": postgres_service.host, + "port": postgres_service.port, + "user": postgres_service.user, + "password": postgres_service.password, + "database": postgres_service.database, + }, + migration_config={ + "script_location": str(migration_dir), + "version_table_name": "sqlspec_migrations_asyncpg", + }, + ) + commands = AsyncMigrationCommands(config) + + try: + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # 2. Create a migration + migration_content = '''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE users ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + email VARCHAR(255) UNIQUE NOT NULL + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS users"] +''' + (migration_dir / "0001_create_users.py").write_text(migration_content) + + # 3. Apply migration + await commands.upgrade() + + # 4. Test transaction behavior with the session + async with config.provide_session() as driver: + # Start manual transaction + await driver.begin() + try: + # Insert data within transaction + await driver.execute( + "INSERT INTO users (name, email) VALUES ($1, $2)", ("Transaction User", "trans@example.com") + ) + + # Verify data exists within transaction + result = await driver.execute("SELECT * FROM users WHERE name = 'Transaction User'") + assert len(result.data) == 1 + await driver.commit() + except Exception: + await driver.rollback() + raise + + # Transaction should be committed - verify data persists + result = await driver.execute("SELECT * FROM users WHERE name = 'Transaction User'") + assert len(result.data) == 1 + + # 5.
Test transaction rollback + async with config.provide_session() as driver: + await driver.begin() + try: + await driver.execute( + "INSERT INTO users (name, email) VALUES ($1, $2)", ("Rollback User", "rollback@example.com") + ) + # Force an error to trigger rollback + raise Exception("Intentional rollback") + except Exception: + await driver.rollback() + + # Verify rollback - data should not exist + result = await driver.execute("SELECT * FROM users WHERE name = 'Rollback User'") + assert len(result.data) == 0 + finally: + if config.pool_instance: + await config.close_pool() diff --git a/tests/integration/test_adapters/test_bigquery/test_migrations.py b/tests/integration/test_adapters/test_bigquery/test_migrations.py new file mode 100644 index 000000000..a24f7edd3 --- /dev/null +++ b/tests/integration/test_adapters/test_bigquery/test_migrations.py @@ -0,0 +1,431 @@ +"""Integration tests for BigQuery migration workflow.""" + +import tempfile +from pathlib import Path + +import pytest +from pytest_databases.docker.bigquery import BigQueryService + +from sqlspec.adapters.bigquery.config import BigQueryConfig +from sqlspec.migrations.commands import MigrationCommands + + +@pytest.mark.xdist_group("migrations") +def test_bigquery_migration_full_workflow(bigquery_service: BigQueryService) -> None: + """Test full BigQuery migration workflow: init -> create -> upgrade -> downgrade.""" + pytest.skip("BigQuery migration tests require real BigQuery backend (emulator has SQL syntax limitations)") + # Generate unique table names for this test + test_id = "bigquery_full_workflow" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + # Create BigQuery config with migration directory + from google.api_core.client_options import ClientOptions + from google.auth.credentials import AnonymousCredentials + + config = BigQueryConfig( + connection_config={ + "project": bigquery_service.project, + "dataset_id": bigquery_service.dataset, + "client_options": ClientOptions(api_endpoint=f"http://{bigquery_service.host}:{bigquery_service.port}"), + "credentials": AnonymousCredentials(), # type: ignore[no-untyped-call] + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = MigrationCommands(config) + + # 1. Initialize migrations + commands.init(str(migration_dir), package=True) + + # Verify initialization + assert migration_dir.exists() + assert (migration_dir / "__init__.py").exists() + + # 2. Create a migration with simple schema + migration_content = f'''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE OR REPLACE TABLE `{bigquery_service.project}.{bigquery_service.dataset}.{users_table}` ( + id INT64, + name STRING NOT NULL, + email STRING, + created_at TIMESTAMP + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS `{bigquery_service.project}.{bigquery_service.dataset}.{users_table}`"] +''' + + # Write migration file + migration_file = migration_dir / "0001_create_users.py" + migration_file.write_text(migration_content) + + try: + # 3. Apply migration (upgrade) + commands.upgrade() + + # 4. 
Verify migration was applied + with config.provide_session() as driver: + # Check that table exists + result = driver.execute( + f"SELECT table_name FROM `{bigquery_service.project}.{bigquery_service.dataset}.INFORMATION_SCHEMA.TABLES` WHERE table_name = '{users_table}'" + ) + assert len(result.data) == 1 + + # Insert test data + driver.execute( + f"INSERT INTO `{bigquery_service.project}.{bigquery_service.dataset}.{users_table}` (id, name, email) VALUES (@id, @name, @email)", + {"id": 1, "name": "John Doe", "email": "john@example.com"}, + ) + + # Verify data + users_result = driver.execute( + f"SELECT * FROM `{bigquery_service.project}.{bigquery_service.dataset}.{users_table}`" + ) + assert len(users_result.data) == 1 + assert users_result.data[0]["name"] == "John Doe" + assert users_result.data[0]["email"] == "john@example.com" + + # 5. Downgrade migration + commands.downgrade("base") + + # 6. Verify table was dropped + with config.provide_session() as driver: + result = driver.execute( + f"SELECT table_name FROM `{bigquery_service.project}.{bigquery_service.dataset}.INFORMATION_SCHEMA.TABLES` WHERE table_name = '{users_table}'" + ) + assert len(result.data) == 0 + finally: + # Ensure pool is closed + if config.pool_instance: + config.close_pool() + + +@pytest.mark.xdist_group("migrations") +def test_bigquery_multiple_migrations_workflow(bigquery_service: BigQueryService) -> None: + """Test BigQuery workflow with multiple migrations: create -> apply both -> downgrade one -> downgrade all.""" + pytest.skip("BigQuery migration tests require real BigQuery backend (emulator has SQL syntax limitations)") + # Generate unique table names for this test + test_id = "bigquery_multi_workflow" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + posts_table = f"posts_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + from google.api_core.client_options import ClientOptions + from google.auth.credentials import AnonymousCredentials + + config = BigQueryConfig( + connection_config={ + "project": bigquery_service.project, + "dataset_id": bigquery_service.dataset, + "client_options": ClientOptions(api_endpoint=f"http://{bigquery_service.host}:{bigquery_service.port}"), + "credentials": AnonymousCredentials(), # type: ignore[no-untyped-call] + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = MigrationCommands(config) + + # 1. Initialize migrations + commands.init(str(migration_dir), package=True) + + # 2. Create first migration + migration1_content = f'''"""Create users table.""" + + +def up(): + """Create users table.""" + return [""" + CREATE OR REPLACE TABLE `{bigquery_service.project}.{bigquery_service.dataset}.{users_table}` ( + id INT64, + name STRING NOT NULL, + email STRING + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS `{bigquery_service.project}.{bigquery_service.dataset}.{users_table}`"] +''' + (migration_dir / "0001_create_users.py").write_text(migration1_content) + + # 3. 
Create second migration + migration2_content = f'''"""Create posts table.""" + + +def up(): + """Create posts table.""" + return [""" + CREATE OR REPLACE TABLE `{bigquery_service.project}.{bigquery_service.dataset}.{posts_table}` ( + id INT64, + title STRING NOT NULL, + content STRING, + user_id INT64 + ) + """] + + +def down(): + """Drop posts table.""" + return ["DROP TABLE IF EXISTS `{bigquery_service.project}.{bigquery_service.dataset}.{posts_table}`"] +''' + (migration_dir / "0002_create_posts.py").write_text(migration2_content) + + try: + # 4. Apply all migrations + commands.upgrade() + + # 5. Verify both tables exist + with config.provide_session() as driver: + users_result = driver.execute( + f"SELECT table_name FROM `{bigquery_service.project}.{bigquery_service.dataset}.INFORMATION_SCHEMA.TABLES` WHERE table_name = '{users_table}'" + ) + posts_result = driver.execute( + f"SELECT table_name FROM `{bigquery_service.project}.{bigquery_service.dataset}.INFORMATION_SCHEMA.TABLES` WHERE table_name = '{posts_table}'" + ) + assert len(users_result.data) == 1 + assert len(posts_result.data) == 1 + + # 6. Downgrade to version 0001 (should remove posts table) + commands.downgrade("0001") + + # 7. Verify only users table remains + with config.provide_session() as driver: + users_result = driver.execute( + f"SELECT table_name FROM `{bigquery_service.project}.{bigquery_service.dataset}.INFORMATION_SCHEMA.TABLES` WHERE table_name = '{users_table}'" + ) + posts_result = driver.execute( + f"SELECT table_name FROM `{bigquery_service.project}.{bigquery_service.dataset}.INFORMATION_SCHEMA.TABLES` WHERE table_name = '{posts_table}'" + ) + assert len(users_result.data) == 1 + assert len(posts_result.data) == 0 + + # 8. Downgrade to base + commands.downgrade("base") + + # 9. Verify all tables are gone + with config.provide_session() as driver: + users_result = driver.execute( + f"SELECT table_name FROM `{bigquery_service.project}.{bigquery_service.dataset}.INFORMATION_SCHEMA.TABLES` WHERE table_name IN ('{users_table}', '{posts_table}')" + ) + assert len(users_result.data) == 0 + finally: + if config.pool_instance: + config.close_pool() + + +@pytest.mark.xdist_group("migrations") +def test_bigquery_migration_current_command(bigquery_service: BigQueryService) -> None: + """Test the current migration command shows correct version for BigQuery.""" + pytest.skip("BigQuery migration tests require real BigQuery backend (emulator has SQL syntax limitations)") + # Generate unique table names for this test + test_id = "bigquery_current_cmd" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + from google.api_core.client_options import ClientOptions + from google.auth.credentials import AnonymousCredentials + + config = BigQueryConfig( + connection_config={ + "project": bigquery_service.project, + "dataset_id": bigquery_service.dataset, + "client_options": ClientOptions(api_endpoint=f"http://{bigquery_service.host}:{bigquery_service.port}"), + "credentials": AnonymousCredentials(), # type: ignore[no-untyped-call] + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = MigrationCommands(config) + + try: + # 1. Initialize migrations + commands.init(str(migration_dir), package=True) + + # 2. 
Initially no current version + current_version = commands.current() + assert current_version is None or current_version == "base" + + # 3. Create a migration + migration_content = f'''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE OR REPLACE TABLE `{bigquery_service.project}.{bigquery_service.dataset}.{users_table}` ( + id INT64, + name STRING NOT NULL + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS `{bigquery_service.project}.{bigquery_service.dataset}.{users_table}`"] +''' + (migration_dir / "0001_create_users.py").write_text(migration_content) + + # 4. Apply migration + commands.upgrade() + + # 5. Check current version is now 0001 + current_version = commands.current() + assert current_version == "0001" + + # 6. Downgrade + commands.downgrade("base") + + # 7. Check current version is back to base/None + current_version = commands.current() + assert current_version is None or current_version == "base" + finally: + if config.pool_instance: + config.close_pool() + + +@pytest.mark.xdist_group("migrations") +def test_bigquery_migration_error_handling(bigquery_service: BigQueryService) -> None: + """Test BigQuery migration error handling.""" + pytest.skip("BigQuery migration tests require real BigQuery backend (emulator has SQL syntax limitations)") + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + from google.api_core.client_options import ClientOptions + from google.auth.credentials import AnonymousCredentials + + config = BigQueryConfig( + connection_config={ + "project": bigquery_service.project, + "dataset_id": bigquery_service.dataset, + "client_options": ClientOptions(api_endpoint=f"http://{bigquery_service.host}:{bigquery_service.port}"), + "credentials": AnonymousCredentials(), # type: ignore[no-untyped-call] + }, + migration_config={ + "script_location": str(migration_dir), + "version_table_name": "sqlspec_migrations_bigquery_error", + }, + ) + commands = MigrationCommands(config) + + try: + # 1. Initialize migrations + commands.init(str(migration_dir), package=True) + + # 2. Create a migration with invalid SQL + migration_content = '''"""Migration with invalid SQL.""" + + +def up(): + """Create table with invalid SQL.""" + return ["CREATE INVALID SQL STATEMENT"] + + +def down(): + """Drop table.""" + return ["DROP TABLE IF EXISTS invalid_table"] +''' + (migration_dir / "0001_invalid.py").write_text(migration_content) + + # 3. Try to apply migration - should raise an error + with pytest.raises(Exception): + commands.upgrade() + + # 4. 
Verify no migration was recorded due to error + with config.provide_session() as driver: + # Check migration tracking table exists but is empty + try: + result = driver.execute("SELECT COUNT(*) as count FROM sqlspec_migrations_bigquery_error") + assert result.data[0]["count"] == 0 + except Exception: + # If table doesn't exist, that's also acceptable + pass + finally: + if config.pool_instance: + config.close_pool() + + +@pytest.mark.xdist_group("migrations") +def test_bigquery_migration_with_transactions(bigquery_service: BigQueryService) -> None: + """Test BigQuery migrations work properly with transactions.""" + pytest.skip("BigQuery migration tests require real BigQuery backend (emulator has SQL syntax limitations)") + # Generate unique table names for this test + test_id = "bigquery_transactions" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + from google.api_core.client_options import ClientOptions + from google.auth.credentials import AnonymousCredentials + + config = BigQueryConfig( + connection_config={ + "project": bigquery_service.project, + "dataset_id": bigquery_service.dataset, + "client_options": ClientOptions(api_endpoint=f"http://{bigquery_service.host}:{bigquery_service.port}"), + "credentials": AnonymousCredentials(), # type: ignore[no-untyped-call] + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = MigrationCommands(config) + + try: + # 1. Initialize migrations + commands.init(str(migration_dir), package=True) + + # 2. Create a migration + migration_content = f'''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE OR REPLACE TABLE `{bigquery_service.project}.{bigquery_service.dataset}.{users_table}` ( + id INT64, + name STRING NOT NULL, + email STRING + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS `{bigquery_service.project}.{bigquery_service.dataset}.{users_table}`"] +''' + (migration_dir / "0001_create_users.py").write_text(migration_content) + + # 3. Apply migration + commands.upgrade() + + # 4. 
Test session behavior (BigQuery has limited transaction support) + with config.provide_session() as driver: + # Insert data - BigQuery auto-commits each statement + driver.execute( + f"INSERT INTO `{bigquery_service.project}.{bigquery_service.dataset}.{users_table}` (id, name, email) VALUES (@id, @name, @email)", + {"id": 1, "name": "Transaction User", "email": "trans@example.com"}, + ) + + # Verify data exists + result = driver.execute( + f"SELECT * FROM `{bigquery_service.project}.{bigquery_service.dataset}.{users_table}` WHERE name = 'Transaction User'" + ) + assert len(result.data) == 1 + finally: + if config.pool_instance: + config.close_pool() diff --git a/tests/integration/test_adapters/test_duckdb/test_migrations.py b/tests/integration/test_adapters/test_duckdb/test_migrations.py new file mode 100644 index 000000000..0625bc4d0 --- /dev/null +++ b/tests/integration/test_adapters/test_duckdb/test_migrations.py @@ -0,0 +1,329 @@ +"""Integration tests for DuckDB migration workflow.""" + +import tempfile +from pathlib import Path + +import pytest + +from sqlspec.adapters.duckdb.config import DuckDBConfig +from sqlspec.migrations.commands import MigrationCommands + + +@pytest.mark.xdist_group("migrations") +def test_duckdb_migration_full_workflow() -> None: + """Test full DuckDB migration workflow: init -> create -> upgrade -> downgrade.""" + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + db_path = Path(temp_dir) / "test.duckdb" + + # Create DuckDB config with migration directory + config = DuckDBConfig( + pool_config={"database": str(db_path)}, + migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, + ) + commands = MigrationCommands(config) + + # 1. Initialize migrations + commands.init(str(migration_dir), package=True) + + # Verify initialization + assert migration_dir.exists() + assert (migration_dir / "__init__.py").exists() + + # 2. Create a migration with simple schema + migration_content = '''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE users ( + id INTEGER PRIMARY KEY, + name VARCHAR NOT NULL, + email VARCHAR UNIQUE NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS users"] +''' + + # Write migration file + migration_file = migration_dir / "0001_create_users.py" + migration_file.write_text(migration_content) + + # 3. Apply migration (upgrade) + commands.upgrade() + + # 4. Verify migration was applied + with config.provide_session() as driver: + # Check that table exists + result = driver.execute("SELECT table_name FROM information_schema.tables WHERE table_name = 'users'") + assert len(result.data) == 1 + + # Insert test data + driver.execute("INSERT INTO users (id, name, email) VALUES (?, ?, ?)", (1, "John Doe", "john@example.com")) + + # Verify data + users_result = driver.execute("SELECT * FROM users") + assert len(users_result.data) == 1 + assert users_result.data[0]["name"] == "John Doe" + assert users_result.data[0]["email"] == "john@example.com" + + # 5. Downgrade migration + commands.downgrade("base") + + # 6. 
Verify table was dropped + with config.provide_session() as driver: + result = driver.execute("SELECT table_name FROM information_schema.tables WHERE table_name = 'users'") + assert len(result.data) == 0 + + +@pytest.mark.xdist_group("migrations") +def test_duckdb_multiple_migrations_workflow() -> None: + """Test DuckDB workflow with multiple migrations: create -> apply both -> downgrade one -> downgrade all.""" + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + db_path = Path(temp_dir) / "test.duckdb" + + # Create DuckDB config with migration directory + config = DuckDBConfig( + pool_config={"database": str(db_path)}, + migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, + ) + commands = MigrationCommands(config) + + # Initialize migrations + commands.init(str(migration_dir), package=True) + + # First migration - create users table + migration1_content = '''"""Create users table.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE users ( + id INTEGER PRIMARY KEY, + name VARCHAR NOT NULL, + email VARCHAR UNIQUE NOT NULL + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS users"] +''' + + # Second migration - create posts table + migration2_content = '''"""Create posts table.""" + + +def up(): + """Create posts table.""" + return [""" + CREATE TABLE posts ( + id INTEGER PRIMARY KEY, + title VARCHAR NOT NULL, + content TEXT, + user_id INTEGER, + FOREIGN KEY (user_id) REFERENCES users (id) + ) + """] + + +def down(): + """Drop posts table.""" + return ["DROP TABLE IF EXISTS posts"] +''' + + # Write migration files + (migration_dir / "0001_create_users.py").write_text(migration1_content) + (migration_dir / "0002_create_posts.py").write_text(migration2_content) + + # Apply all migrations + commands.upgrade() + + # Verify both tables exist + with config.provide_session() as driver: + tables_result = driver.execute( + "SELECT table_name FROM information_schema.tables WHERE table_schema = 'main' ORDER BY table_name" + ) + table_names = [t["table_name"] for t in tables_result.data] + assert "users" in table_names + assert "posts" in table_names + + # Test the relationship + driver.execute("INSERT INTO users (id, name, email) VALUES (?, ?, ?)", (1, "Author", "author@example.com")) + driver.execute( + "INSERT INTO posts (id, title, content, user_id) VALUES (?, ?, ?, ?)", (1, "My Post", "Post content", 1) + ) + + posts_result = driver.execute("SELECT * FROM posts") + assert len(posts_result.data) == 1 + assert posts_result.data[0]["title"] == "My Post" + + # Downgrade to revision 0001 (should drop posts table) + commands.downgrade("0001") + + with config.provide_session() as driver: + tables_result = driver.execute( + "SELECT table_name FROM information_schema.tables WHERE table_schema = 'main'" + ) + table_names = [t["table_name"] for t in tables_result.data] + assert "users" in table_names + assert "posts" not in table_names + + # Downgrade to base (should drop all tables) + commands.downgrade("base") + + with config.provide_session() as driver: + tables_result = driver.execute( + "SELECT table_name FROM information_schema.tables WHERE table_schema = 'main' AND table_name NOT LIKE 'sqlspec_%'" + ) + # Should only have migration tracking table remaining + table_names = [t["table_name"] for t in tables_result.data if not t["table_name"].startswith("sqlspec_")] + assert len(table_names) == 0 + + +@pytest.mark.xdist_group("migrations") +def 
test_duckdb_migration_current_command() -> None: + """Test the current migration command shows correct version for DuckDB.""" + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + db_path = Path(temp_dir) / "test.duckdb" + + # Create DuckDB config with migration directory + config = DuckDBConfig( + pool_config={"database": str(db_path)}, + migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, + ) + commands = MigrationCommands(config) + + # Initialize migrations + commands.init(str(migration_dir), package=True) + + # Should show no current version initially + commands.current(verbose=False) # This just outputs to console + + # Create and apply a migration + migration_content = '''"""Test migration.""" + + +def up(): + """Create test table.""" + return ["CREATE TABLE test_table (id INTEGER PRIMARY KEY)"] + + +def down(): + """Drop test table.""" + return ["DROP TABLE IF EXISTS test_table"] +''' + + (migration_dir / "0001_test.py").write_text(migration_content) + + # Apply migration + commands.upgrade() + + # Check current version (this just outputs, can't assert return value) + commands.current(verbose=True) + + +@pytest.mark.xdist_group("migrations") +def test_duckdb_migration_error_handling() -> None: + """Test DuckDB migration error handling.""" + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + db_path = Path(temp_dir) / "test.duckdb" + + # Create DuckDB config with migration directory + config = DuckDBConfig( + pool_config={"database": str(db_path)}, + migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, + ) + commands = MigrationCommands(config) + + # Initialize migrations + commands.init(str(migration_dir), package=True) + + # Create a migration with syntax error + migration_content = '''"""Bad migration.""" + + +def up(): + """Invalid SQL - should cause error.""" + return ["CREATE BIG_TABLE invalid_sql"] + + +def down(): + """No downgrade needed.""" + return [] +''' + + (migration_dir / "0001_bad.py").write_text(migration_content) + + # Attempting to upgrade should raise an error + with pytest.raises(Exception): # Will be wrapped in some migration exception + commands.upgrade() + + +@pytest.mark.xdist_group("migrations") +def test_duckdb_migration_with_transactions() -> None: + """Test DuckDB migrations work properly with transactions.""" + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + db_path = Path(temp_dir) / "test.duckdb" + + # Create DuckDB config with migration directory + config = DuckDBConfig( + pool_config={"database": str(db_path)}, + migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, + ) + commands = MigrationCommands(config) + + # Initialize migrations + commands.init(str(migration_dir), package=True) + + # Create a migration that uses transactions + migration_content = '''"""Migration with multiple operations.""" + + +def up(): + """Create customers table with data.""" + return [ + """CREATE TABLE customers ( + id INTEGER PRIMARY KEY, + name VARCHAR NOT NULL + )""", + "INSERT INTO customers (id, name) VALUES (1, 'Customer 1')", + "INSERT INTO customers (id, name) VALUES (2, 'Customer 2')" + ] + + +def down(): + """Drop customers table.""" + return ["DROP TABLE IF EXISTS customers"] +''' + + (migration_dir / "0001_transaction_test.py").write_text(migration_content) + + # Apply 
migration + commands.upgrade() + + # Verify both table and data exist + with config.provide_session() as driver: + customers_result = driver.execute("SELECT * FROM customers ORDER BY name") + assert len(customers_result.data) == 2 + assert customers_result.data[0]["name"] == "Customer 1" + assert customers_result.data[1]["name"] == "Customer 2" + + # Downgrade should remove everything + commands.downgrade("base") + + with config.provide_session() as driver: + result = driver.execute("SELECT table_name FROM information_schema.tables WHERE table_name = 'customers'") + assert len(result.data) == 0 diff --git a/tests/integration/test_adapters/test_oracledb/test_migrations.py b/tests/integration/test_adapters/test_oracledb/test_migrations.py new file mode 100644 index 000000000..91715b38e --- /dev/null +++ b/tests/integration/test_adapters/test_oracledb/test_migrations.py @@ -0,0 +1,904 @@ +"""Integration tests for OracleDB migration workflow.""" + +import tempfile +from pathlib import Path + +import pytest +from pytest_databases.docker.oracle import OracleService + +from sqlspec.adapters.oracledb.config import OracleAsyncConfig, OracleSyncConfig +from sqlspec.migrations.commands import AsyncMigrationCommands, MigrationCommands + + +@pytest.mark.xdist_group("migrations") +def test_oracledb_sync_migration_full_workflow(oracle_23ai_service: OracleService) -> None: + """Test full OracleDB sync migration workflow: init -> create -> upgrade -> downgrade.""" + # Generate unique table names for this test + test_id = "oracledb_sync_full" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + # Create Oracle sync config with migration directory + config = OracleSyncConfig( + pool_config={ + "host": oracle_23ai_service.host, + "port": oracle_23ai_service.port, + "service_name": oracle_23ai_service.service_name, + "user": oracle_23ai_service.user, + "password": oracle_23ai_service.password, + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = MigrationCommands(config) + + # 1. Initialize migrations + commands.init(str(migration_dir), package=True) + + # Verify initialization + assert migration_dir.exists() + assert (migration_dir / "__init__.py").exists() + + # 2. Create a migration with simple schema + migration_content = f'''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id NUMBER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, + name VARCHAR2(255) NOT NULL, + email VARCHAR2(255) UNIQUE NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE {users_table}"] +''' + + # Write migration file + migration_file = migration_dir / "0001_create_users.py" + migration_file.write_text(migration_content) + + try: + # 3. Apply migration (upgrade) + commands.upgrade() + + # 4. 
Verify migration was applied + with config.provide_session() as driver: + # Check that table exists + result = driver.execute( + f"SELECT table_name FROM user_tables WHERE table_name = '{users_table.upper()}'" + ) + assert len(result.data) == 1 + + # Insert test data + driver.execute( + f"INSERT INTO {users_table} (name, email) VALUES (:1, :2)", ("John Doe", "john@example.com") + ) + + # Verify data + users_result = driver.execute(f"SELECT * FROM {users_table}") + assert len(users_result.data) == 1 + assert users_result.data[0]["NAME"] == "John Doe" # Oracle returns uppercase + assert users_result.data[0]["EMAIL"] == "john@example.com" + + # 5. Downgrade migration + commands.downgrade("base") + + # 6. Verify table was dropped + with config.provide_session() as driver: + result = driver.execute( + f"SELECT table_name FROM user_tables WHERE table_name = '{users_table.upper()}'" + ) + assert len(result.data) == 0 + finally: + # Ensure pool is closed + if config.pool_instance: + config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_oracledb_async_migration_full_workflow(oracle_23ai_service: OracleService) -> None: + """Test full OracleDB async migration workflow: init -> create -> upgrade -> downgrade.""" + # Generate unique table names for this test + test_id = "oracledb_async_full" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + # Create Oracle async config with migration directory + config = OracleAsyncConfig( + pool_config={ + "host": oracle_23ai_service.host, + "port": oracle_23ai_service.port, + "service_name": oracle_23ai_service.service_name, + "user": oracle_23ai_service.user, + "password": oracle_23ai_service.password, + "min": 1, + "max": 5, + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = AsyncMigrationCommands(config) + + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # Verify initialization + assert migration_dir.exists() + assert (migration_dir / "__init__.py").exists() + + # 2. Create a migration with simple schema + migration_content = f'''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id NUMBER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, + name VARCHAR2(255) NOT NULL, + email VARCHAR2(255) UNIQUE NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE {users_table}"] +''' + + # Write migration file + migration_file = migration_dir / "0001_create_users.py" + migration_file.write_text(migration_content) + + try: + # 3. Apply migration (upgrade) + await commands.upgrade() + + # 4. Verify migration was applied + async with config.provide_session() as driver: + # Check that table exists + result = await driver.execute( + f"SELECT table_name FROM user_tables WHERE table_name = '{users_table.upper()}'" + ) + assert len(result.data) == 1 + + # Insert test data + await driver.execute( + f"INSERT INTO {users_table} (name, email) VALUES (:1, :2)", ("John Doe", "john@example.com") + ) + + # Verify data + users_result = await driver.execute(f"SELECT * FROM {users_table}") + assert len(users_result.data) == 1 + assert users_result.data[0]["NAME"] == "John Doe" # Oracle returns uppercase + assert users_result.data[0]["EMAIL"] == "john@example.com" + + # 5. 
Downgrade migration + await commands.downgrade("base") + + # 6. Verify table was dropped + async with config.provide_session() as driver: + result = await driver.execute( + f"SELECT table_name FROM user_tables WHERE table_name = '{users_table.upper()}'" + ) + assert len(result.data) == 0 + finally: + # Ensure pool is closed + if config.pool_instance: + await config.close_pool() + + +@pytest.mark.xdist_group("migrations") +def test_oracledb_sync_multiple_migrations_workflow(oracle_23ai_service: OracleService) -> None: + """Test OracleDB sync workflow with multiple migrations: create -> apply both -> downgrade one -> downgrade all.""" + # Generate unique table names for this test + test_id = "oracledb_sync_multiple" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + posts_table = f"posts_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + config = OracleSyncConfig( + pool_config={ + "host": oracle_23ai_service.host, + "port": oracle_23ai_service.port, + "service_name": oracle_23ai_service.service_name, + "user": oracle_23ai_service.user, + "password": oracle_23ai_service.password, + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = MigrationCommands(config) + + # 1. Initialize migrations + commands.init(str(migration_dir), package=True) + + # 2. Create first migration + migration1_content = f'''"""Create users table.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id NUMBER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, + name VARCHAR2(255) NOT NULL, + email VARCHAR2(255) UNIQUE NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE {users_table}"] +''' + (migration_dir / "0001_create_users.py").write_text(migration1_content) + + # 3. Create second migration + migration2_content = f'''"""Create posts table.""" + + +def up(): + """Create posts table.""" + return [""" + CREATE TABLE {posts_table} ( + id NUMBER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, + title VARCHAR2(255) NOT NULL, + content CLOB, + user_id NUMBER, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + CONSTRAINT fk_posts_user FOREIGN KEY (user_id) REFERENCES {users_table}(id) + ) + """] + + +def down(): + """Drop posts table.""" + return ["DROP TABLE {posts_table}"] +''' + (migration_dir / "0002_create_posts.py").write_text(migration2_content) + + try: + # 4. Apply all migrations + commands.upgrade() + + # 5. Verify both tables exist + with config.provide_session() as driver: + users_result = driver.execute( + f"SELECT table_name FROM user_tables WHERE table_name = '{users_table.upper()}'" + ) + posts_result = driver.execute( + f"SELECT table_name FROM user_tables WHERE table_name = '{posts_table.upper()}'" + ) + assert len(users_result.data) == 1 + assert len(posts_result.data) == 1 + + # Test relational integrity + driver.execute( + f"INSERT INTO {users_table} (name, email) VALUES (:1, :2)", ("John Doe", "john@example.com") + ) + driver.execute( + f"INSERT INTO {posts_table} (title, content, user_id) VALUES (:1, :2, :3)", + ("Test Post", "This is a test post", 1), + ) + + # 6. Downgrade to version 0001 (should remove posts table) + commands.downgrade("0001") + + # 7. 
Verify only users table remains + with config.provide_session() as driver: + users_result = driver.execute( + f"SELECT table_name FROM user_tables WHERE table_name = '{users_table.upper()}'" + ) + posts_result = driver.execute( + f"SELECT table_name FROM user_tables WHERE table_name = '{posts_table.upper()}'" + ) + assert len(users_result.data) == 1 + assert len(posts_result.data) == 0 + + # 8. Downgrade to base + commands.downgrade("base") + + # 9. Verify all tables are gone + with config.provide_session() as driver: + users_result = driver.execute( + f"SELECT table_name FROM user_tables WHERE table_name IN ('{users_table.upper()}', '{posts_table.upper()}')" + ) + assert len(users_result.data) == 0 + finally: + if config.pool_instance: + config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_oracledb_async_multiple_migrations_workflow(oracle_23ai_service: OracleService) -> None: + """Test OracleDB async workflow with multiple migrations: create -> apply both -> downgrade one -> downgrade all.""" + # Generate unique table names for this test + test_id = "oracledb_async_multiple" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + posts_table = f"posts_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + config = OracleAsyncConfig( + pool_config={ + "host": oracle_23ai_service.host, + "port": oracle_23ai_service.port, + "service_name": oracle_23ai_service.service_name, + "user": oracle_23ai_service.user, + "password": oracle_23ai_service.password, + "min": 1, + "max": 5, + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = AsyncMigrationCommands(config) + + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # 2. Create first migration + migration1_content = f'''"""Create users table.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id NUMBER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, + name VARCHAR2(255) NOT NULL, + email VARCHAR2(255) UNIQUE NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE {users_table}"] +''' + (migration_dir / "0001_create_users.py").write_text(migration1_content) + + # 3. Create second migration + migration2_content = f'''"""Create posts table.""" + + +def up(): + """Create posts table.""" + return [""" + CREATE TABLE {posts_table} ( + id NUMBER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, + title VARCHAR2(255) NOT NULL, + content CLOB, + user_id NUMBER, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + CONSTRAINT fk_posts_user FOREIGN KEY (user_id) REFERENCES {users_table}(id) + ) + """] + + +def down(): + """Drop posts table.""" + return ["DROP TABLE {posts_table}"] +''' + (migration_dir / "0002_create_posts.py").write_text(migration2_content) + + try: + # 4. Apply all migrations + await commands.upgrade() + + # 5. 
Verify both tables exist + async with config.provide_session() as driver: + users_result = await driver.execute( + f"SELECT table_name FROM user_tables WHERE table_name = '{users_table.upper()}'" + ) + posts_result = await driver.execute( + f"SELECT table_name FROM user_tables WHERE table_name = '{posts_table.upper()}'" + ) + assert len(users_result.data) == 1 + assert len(posts_result.data) == 1 + + # Test relational integrity + await driver.execute( + f"INSERT INTO {users_table} (name, email) VALUES (:1, :2)", ("John Doe", "john@example.com") + ) + await driver.execute( + f"INSERT INTO {posts_table} (title, content, user_id) VALUES (:1, :2, :3)", + ("Test Post", "This is a test post", 1), + ) + + # 6. Downgrade to version 0001 (should remove posts table) + await commands.downgrade("0001") + + # 7. Verify only users table remains + async with config.provide_session() as driver: + users_result = await driver.execute( + f"SELECT table_name FROM user_tables WHERE table_name = '{users_table.upper()}'" + ) + posts_result = await driver.execute( + f"SELECT table_name FROM user_tables WHERE table_name = '{posts_table.upper()}'" + ) + assert len(users_result.data) == 1 + assert len(posts_result.data) == 0 + + # 8. Downgrade to base + await commands.downgrade("base") + + # 9. Verify all tables are gone + async with config.provide_session() as driver: + users_result = await driver.execute( + f"SELECT table_name FROM user_tables WHERE table_name IN ('{users_table.upper()}', '{posts_table.upper()}')" + ) + assert len(users_result.data) == 0 + finally: + if config.pool_instance: + await config.close_pool() + + +@pytest.mark.xdist_group("migrations") +def test_oracledb_sync_migration_current_command(oracle_23ai_service: OracleService) -> None: + """Test the current migration command shows correct version for OracleDB sync.""" + # Generate unique table names for this test + test_id = "oracledb_sync_current" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + config = OracleSyncConfig( + pool_config={ + "host": oracle_23ai_service.host, + "port": oracle_23ai_service.port, + "service_name": oracle_23ai_service.service_name, + "user": oracle_23ai_service.user, + "password": oracle_23ai_service.password, + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = MigrationCommands(config) + + try: + # 1. Initialize migrations + commands.init(str(migration_dir), package=True) + + # 2. Initially no current version + current_version = commands.current() + assert current_version is None or current_version == "base" + + # 3. Create a migration + migration_content = f'''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id NUMBER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, + name VARCHAR2(255) NOT NULL + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE {users_table}"] +''' + (migration_dir / "0001_create_users.py").write_text(migration_content) + + # 4. Apply migration + commands.upgrade() + + # 5. Check current version is now 0001 + current_version = commands.current() + assert current_version == "0001" + + # 6. Downgrade + commands.downgrade("base") + + # 7. 
Check current version is back to base/None + current_version = commands.current() + assert current_version is None or current_version == "base" + finally: + if config.pool_instance: + config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_oracledb_async_migration_current_command(oracle_23ai_service: OracleService) -> None: + """Test the current migration command shows correct version for OracleDB async.""" + # Generate unique table names for this test + test_id = "oracledb_async_current" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + config = OracleAsyncConfig( + pool_config={ + "host": oracle_23ai_service.host, + "port": oracle_23ai_service.port, + "service_name": oracle_23ai_service.service_name, + "user": oracle_23ai_service.user, + "password": oracle_23ai_service.password, + "min": 1, + "max": 5, + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = AsyncMigrationCommands(config) + + try: + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # 2. Initially no current version + current_version = await commands.current() + assert current_version is None or current_version == "base" + + # 3. Create a migration + migration_content = f'''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id NUMBER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, + name VARCHAR2(255) NOT NULL + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE {users_table}"] +''' + (migration_dir / "0001_create_users.py").write_text(migration_content) + + # 4. Apply migration + await commands.upgrade() + + # 5. Check current version is now 0001 + current_version = await commands.current() + assert current_version == "0001" + + # 6. Downgrade + await commands.downgrade("base") + + # 7. Check current version is back to base/None + current_version = await commands.current() + assert current_version is None or current_version == "base" + finally: + if config.pool_instance: + await config.close_pool() + + +@pytest.mark.xdist_group("migrations") +def test_oracledb_sync_migration_error_handling(oracle_23ai_service: OracleService) -> None: + """Test OracleDB sync migration error handling.""" + # Generate unique table names for this test + test_id = "oracledb_sync_error" + migration_table = f"sqlspec_migrations_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + config = OracleSyncConfig( + pool_config={ + "host": oracle_23ai_service.host, + "port": oracle_23ai_service.port, + "service_name": oracle_23ai_service.service_name, + "user": oracle_23ai_service.user, + "password": oracle_23ai_service.password, + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = MigrationCommands(config) + + try: + # 1. Initialize migrations + commands.init(str(migration_dir), package=True) + + # 2. Create a migration with invalid SQL + migration_content = '''"""Migration with invalid SQL.""" + + +def up(): + """Create table with invalid SQL.""" + return ["CREATE INVALID SQL STATEMENT"] + + +def down(): + """Drop table.""" + return ["DROP TABLE invalid_table"] +''' + (migration_dir / "0001_invalid.py").write_text(migration_content) + + # 3. 
Try to apply migration - should raise an error + with pytest.raises(Exception): + commands.upgrade() + + # 4. Verify no migration was recorded due to error + with config.provide_session() as driver: + # Check migration tracking table exists but is empty + try: + result = driver.execute(f"SELECT COUNT(*) as count FROM {migration_table}") + assert result.data[0]["COUNT"] == 0 + except Exception: + # If table doesn't exist, that's also acceptable + pass + finally: + if config.pool_instance: + config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_oracledb_async_migration_error_handling(oracle_23ai_service: OracleService) -> None: + """Test OracleDB async migration error handling.""" + # Generate unique table names for this test + test_id = "oracledb_async_error" + migration_table = f"sqlspec_migrations_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + config = OracleAsyncConfig( + pool_config={ + "host": oracle_23ai_service.host, + "port": oracle_23ai_service.port, + "service_name": oracle_23ai_service.service_name, + "user": oracle_23ai_service.user, + "password": oracle_23ai_service.password, + "min": 1, + "max": 5, + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = AsyncMigrationCommands(config) + + try: + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # 2. Create a migration with invalid SQL + migration_content = '''"""Migration with invalid SQL.""" + + +def up(): + """Create table with invalid SQL.""" + return ["CREATE INVALID SQL STATEMENT"] + + +def down(): + """Drop table.""" + return ["DROP TABLE invalid_table"] +''' + (migration_dir / "0001_invalid.py").write_text(migration_content) + + # 3. Try to apply migration - should raise an error + with pytest.raises(Exception): + await commands.upgrade() + + # 4. Verify no migration was recorded due to error + async with config.provide_session() as driver: + # Check migration tracking table exists but is empty + try: + result = await driver.execute(f"SELECT COUNT(*) as count FROM {migration_table}") + assert result.data[0]["COUNT"] == 0 + except Exception: + # If table doesn't exist, that's also acceptable + pass + finally: + if config.pool_instance: + await config.close_pool() + + +@pytest.mark.xdist_group("migrations") +def test_oracledb_sync_migration_with_transactions(oracle_23ai_service: OracleService) -> None: + """Test OracleDB sync migrations work properly with transactions.""" + # Generate unique table names for this test + test_id = "oracledb_sync_trans" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + config = OracleSyncConfig( + pool_config={ + "host": oracle_23ai_service.host, + "port": oracle_23ai_service.port, + "service_name": oracle_23ai_service.service_name, + "user": oracle_23ai_service.user, + "password": oracle_23ai_service.password, + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = MigrationCommands(config) + + try: + # 1. Initialize migrations + commands.init(str(migration_dir), package=True) + + # 2. 
Create a migration + migration_content = f'''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id NUMBER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, + name VARCHAR2(255) NOT NULL, + email VARCHAR2(255) UNIQUE NOT NULL + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE {users_table}"] +''' + (migration_dir / "0001_create_users.py").write_text(migration_content) + + # 3. Apply migration + commands.upgrade() + + # 4. Test transaction behavior with the session + with config.provide_session() as driver: + # Start manual transaction + driver.begin() + try: + # Insert data within transaction + driver.execute( + f"INSERT INTO {users_table} (name, email) VALUES (:1, :2)", + ("Transaction User", "trans@example.com"), + ) + + # Verify data exists within transaction + result = driver.execute(f"SELECT * FROM {users_table} WHERE name = 'Transaction User'") + assert len(result.data) == 1 + driver.commit() + except Exception: + driver.rollback() + raise + + # Transaction should be committed - verify data persists + result = driver.execute(f"SELECT * FROM {users_table} WHERE name = 'Transaction User'") + assert len(result.data) == 1 + + # 5. Test transaction rollback + with config.provide_session() as driver: + driver.begin() + try: + driver.execute( + f"INSERT INTO {users_table} (name, email) VALUES (:1, :2)", + ("Rollback User", "rollback@example.com"), + ) + # Force an error to trigger rollback + raise Exception("Intentional rollback") + except Exception: + driver.rollback() + + # Verify rollback - data should not exist + result = driver.execute(f"SELECT * FROM {users_table} WHERE name = 'Rollback User'") + assert len(result.data) == 0 + finally: + if config.pool_instance: + config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_oracledb_async_migration_with_transactions(oracle_23ai_service: OracleService) -> None: + """Test OracleDB async migrations work properly with transactions.""" + # Generate unique table names for this test + test_id = "oracledb_async_trans" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + config = OracleAsyncConfig( + pool_config={ + "host": oracle_23ai_service.host, + "port": oracle_23ai_service.port, + "service_name": oracle_23ai_service.service_name, + "user": oracle_23ai_service.user, + "password": oracle_23ai_service.password, + "min": 1, + "max": 5, + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = AsyncMigrationCommands(config) + + try: + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # 2. Create a migration + migration_content = f'''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id NUMBER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, + name VARCHAR2(255) NOT NULL, + email VARCHAR2(255) UNIQUE NOT NULL + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE {users_table}"] +''' + (migration_dir / "0001_create_users.py").write_text(migration_content) + + # 3. Apply migration + await commands.upgrade() + + # 4. 
Test transaction behavior with the session + async with config.provide_session() as driver: + # Start manual transaction + await driver.begin() + try: + # Insert data within transaction + await driver.execute( + f"INSERT INTO {users_table} (name, email) VALUES (:1, :2)", + ("Transaction User", "trans@example.com"), + ) + + # Verify data exists within transaction + result = await driver.execute(f"SELECT * FROM {users_table} WHERE name = 'Transaction User'") + assert len(result.data) == 1 + await driver.commit() + except Exception: + await driver.rollback() + raise + + # Transaction should be committed - verify data persists + result = await driver.execute(f"SELECT * FROM {users_table} WHERE name = 'Transaction User'") + assert len(result.data) == 1 + + # 5. Test transaction rollback + async with config.provide_session() as driver: + await driver.begin() + try: + await driver.execute( + f"INSERT INTO {users_table} (name, email) VALUES (:1, :2)", + ("Rollback User", "rollback@example.com"), + ) + # Force an error to trigger rollback + raise Exception("Intentional rollback") + except Exception: + await driver.rollback() + + # Verify rollback - data should not exist + result = await driver.execute(f"SELECT * FROM {users_table} WHERE name = 'Rollback User'") + assert len(result.data) == 0 + finally: + if config.pool_instance: + await config.close_pool() diff --git a/tests/integration/test_adapters/test_psqlpy/test_migrations.py b/tests/integration/test_adapters/test_psqlpy/test_migrations.py new file mode 100644 index 000000000..9a9ab5dbe --- /dev/null +++ b/tests/integration/test_adapters/test_psqlpy/test_migrations.py @@ -0,0 +1,427 @@ +"""Integration tests for Psqlpy (PostgreSQL) migration workflow.""" + +import tempfile +from pathlib import Path + +import pytest +from pytest_databases.docker.postgres import PostgresService + +from sqlspec.adapters.psqlpy.config import PsqlpyConfig +from sqlspec.migrations.commands import AsyncMigrationCommands + + +@pytest.mark.xdist_group("migrations") +async def test_psqlpy_migration_full_workflow(postgres_service: PostgresService) -> None: + """Test full Psqlpy migration workflow: init -> create -> upgrade -> downgrade.""" + # Generate unique table names for this test + test_id = "psqlpy_full_workflow" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + # Create Psqlpy config with migration directory + config = PsqlpyConfig( + pool_config={ + "dsn": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}" + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = AsyncMigrationCommands(config) + + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # Verify initialization + assert migration_dir.exists() + assert (migration_dir / "__init__.py").exists() + + # 2. 
Create a migration with simple schema + migration_content = f'''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + email VARCHAR(255) UNIQUE NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS {users_table}"] +''' + + # Write migration file + migration_file = migration_dir / "0001_create_users.py" + migration_file.write_text(migration_content) + + try: + # 3. Apply migration (upgrade) + await commands.upgrade() + + # 4. Verify migration was applied + async with config.provide_session() as driver: + # Check that table exists + result = await driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = '{users_table}'" + ) + assert len(result.data) == 1 + + # Insert test data + await driver.execute( + f"INSERT INTO {users_table} (name, email) VALUES ($1, $2)", ("John Doe", "john@example.com") + ) + + # Verify data + users_result = await driver.execute(f"SELECT * FROM {users_table}") + assert len(users_result.data) == 1 + assert users_result.data[0]["name"] == "John Doe" + assert users_result.data[0]["email"] == "john@example.com" + + # 5. Downgrade migration + await commands.downgrade("base") + + # 6. Verify table was dropped + async with config.provide_session() as driver: + result = await driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = '{users_table}'" + ) + assert len(result.data) == 0 + finally: + # Ensure pool is closed + if config.pool_instance: + await config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_psqlpy_multiple_migrations_workflow(postgres_service: PostgresService) -> None: + """Test Psqlpy workflow with multiple migrations: create -> apply both -> downgrade one -> downgrade all.""" + # Generate unique table names for this test + test_id = "psqlpy_multi_workflow" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + posts_table = f"posts_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + config = PsqlpyConfig( + pool_config={ + "dsn": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}" + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = AsyncMigrationCommands(config) + + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # 2. Create first migration + migration1_content = f'''"""Create users table.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + email VARCHAR(255) UNIQUE NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS {users_table}"] +''' + (migration_dir / "0001_create_users.py").write_text(migration1_content) + + # 3. 
Create second migration + migration2_content = f'''"""Create posts table.""" + + +def up(): + """Create posts table.""" + return [""" + CREATE TABLE {posts_table} ( + id SERIAL PRIMARY KEY, + title VARCHAR(255) NOT NULL, + content TEXT, + user_id INTEGER REFERENCES {users_table}(id), + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """] + + +def down(): + """Drop posts table.""" + return ["DROP TABLE IF EXISTS {posts_table}"] +''' + (migration_dir / "0002_create_posts.py").write_text(migration2_content) + + try: + # 4. Apply all migrations + await commands.upgrade() + + # 5. Verify both tables exist + async with config.provide_session() as driver: + users_result = await driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = '{users_table}'" + ) + posts_result = await driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = '{posts_table}'" + ) + assert len(users_result.data) == 1 + assert len(posts_result.data) == 1 + + # Test relational integrity + await driver.execute( + f"INSERT INTO {users_table} (name, email) VALUES ($1, $2)", ("John Doe", "john@example.com") + ) + await driver.execute( + f"INSERT INTO {posts_table} (title, content, user_id) VALUES ($1, $2, $3)", + ("Test Post", "This is a test post", 1), + ) + + # 6. Downgrade to version 0001 (should remove posts table) + await commands.downgrade("0001") + + # 7. Verify only users table remains + async with config.provide_session() as driver: + users_result = await driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = '{users_table}'" + ) + posts_result = await driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = '{posts_table}'" + ) + assert len(users_result.data) == 1 + assert len(posts_result.data) == 0 + + # 8. Downgrade to base + await commands.downgrade("base") + + # 9. Verify all tables are gone + async with config.provide_session() as driver: + users_result = await driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name IN ('{users_table}', '{posts_table}')" + ) + assert len(users_result.data) == 0 + finally: + if config.pool_instance: + await config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_psqlpy_migration_current_command(postgres_service: PostgresService) -> None: + """Test the current migration command shows correct version for Psqlpy.""" + # Generate unique table names for this test + test_id = "psqlpy_current_cmd" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + config = PsqlpyConfig( + pool_config={ + "dsn": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}" + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = AsyncMigrationCommands(config) + + try: + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # 2. Initially no current version + current_version = await commands.current() + assert current_version is None or current_version == "base" + + # 3. 
Create a migration + migration_content = f'''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS {users_table}"] +''' + (migration_dir / "0001_create_users.py").write_text(migration_content) + + # 4. Apply migration + await commands.upgrade() + + # 5. Check current version is now 0001 + current_version = await commands.current() + assert current_version == "0001" + + # 6. Downgrade + await commands.downgrade("base") + + # 7. Check current version is back to base/None + current_version = await commands.current() + assert current_version is None or current_version == "base" + finally: + if config.pool_instance: + await config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_psqlpy_migration_error_handling(postgres_service: PostgresService) -> None: + """Test Psqlpy migration error handling.""" + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + config = PsqlpyConfig( + pool_config={ + "dsn": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}" + }, + migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations_psqlpy"}, + ) + commands = AsyncMigrationCommands(config) + + try: + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # 2. Create a migration with invalid SQL + migration_content = '''"""Migration with invalid SQL.""" + + +def up(): + """Create table with invalid SQL.""" + return ["CREATE INVALID SQL STATEMENT"] + + +def down(): + """Drop table.""" + return ["DROP TABLE IF EXISTS invalid_table"] +''' + (migration_dir / "0001_invalid.py").write_text(migration_content) + + # 3. Try to apply migration - should raise an error + with pytest.raises(Exception): + await commands.upgrade() + + # 4. Verify no migration was recorded due to error + async with config.provide_session() as driver: + # Check migration tracking table exists but is empty (must match the configured version_table_name) + try: + result = await driver.execute("SELECT COUNT(*) as count FROM sqlspec_migrations_psqlpy") + assert result.data[0]["count"] == 0 + except Exception: + # If table doesn't exist, that's also acceptable + pass + finally: + if config.pool_instance: + await config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_psqlpy_migration_with_transactions(postgres_service: PostgresService) -> None: + """Test Psqlpy migrations work properly with transactions.""" + # Generate unique table names for this test + test_id = "psqlpy_transactions" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + config = PsqlpyConfig( + pool_config={ + "dsn": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}" + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = AsyncMigrationCommands(config) + + try: + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # 2.
Create a migration + migration_content = f'''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + email VARCHAR(255) UNIQUE NOT NULL + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS {users_table}"] +''' + (migration_dir / "0001_create_users.py").write_text(migration_content) + + # 3. Apply migration + await commands.upgrade() + + # 4. Test transaction behavior with the session + async with config.provide_session() as driver: + # Start manual transaction + await driver.begin() + try: + # Insert data within transaction + await driver.execute( + f"INSERT INTO {users_table} (name, email) VALUES ($1, $2)", + ("Transaction User", "trans@example.com"), + ) + + # Verify data exists within transaction + result = await driver.execute(f"SELECT * FROM {users_table} WHERE name = 'Transaction User'") + assert len(result.data) == 1 + await driver.commit() + except Exception: + await driver.rollback() + raise + + # Transaction should be committed - verify data persists + result = await driver.execute(f"SELECT * FROM {users_table} WHERE name = 'Transaction User'") + assert len(result.data) == 1 + + # 5. Test transaction rollback + async with config.provide_session() as driver: + await driver.begin() + try: + await driver.execute( + f"INSERT INTO {users_table} (name, email) VALUES ($1, $2)", + ("Rollback User", "rollback@example.com"), + ) + # Force an error to trigger rollback + raise Exception("Intentional rollback") + except Exception: + await driver.rollback() + + # Verify rollback - data should not exist + result = await driver.execute(f"SELECT * FROM {users_table} WHERE name = 'Rollback User'") + assert len(result.data) == 0 + finally: + if config.pool_instance: + await config.close_pool() diff --git a/tests/integration/test_adapters/test_psycopg/test_migrations.py b/tests/integration/test_adapters/test_psycopg/test_migrations.py new file mode 100644 index 000000000..3b28b821c --- /dev/null +++ b/tests/integration/test_adapters/test_psycopg/test_migrations.py @@ -0,0 +1,992 @@ +"""Integration tests for Psycopg (PostgreSQL) migration workflow.""" + +import tempfile +from pathlib import Path + +import pytest +from pytest_databases.docker.postgres import PostgresService + +from sqlspec.adapters.psycopg.config import PsycopgSyncConfig +from sqlspec.migrations.commands import AsyncMigrationCommands, MigrationCommands + + +@pytest.mark.xdist_group("migrations") +def test_psycopg_sync_migration_full_workflow(postgres_service: PostgresService) -> None: + """Test full Psycopg sync migration workflow: init -> create -> upgrade -> downgrade.""" + # Generate unique table names for this test + test_id = "psycopg_sync_full" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + # Create Psycopg sync config with migration directory + config = PsycopgSyncConfig( + pool_config={ + "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}" + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = MigrationCommands(config) + + # 1. 
Initialize migrations + commands.init(str(migration_dir), package=True) + + # Verify initialization + assert migration_dir.exists() + assert (migration_dir / "__init__.py").exists() + + # 2. Create a migration with simple schema + migration_content = f'''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + email VARCHAR(255) UNIQUE NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS {users_table}"] +''' + + # Write migration file + migration_file = migration_dir / "0001_create_users.py" + migration_file.write_text(migration_content) + + try: + # 3. Apply migration (upgrade) + commands.upgrade() + + # 4. Verify migration was applied + with config.provide_session() as driver: + # Check that table exists + result = driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = '{users_table}'" + ) + assert len(result.data) == 1 + + # Insert test data + driver.execute( + f"INSERT INTO {users_table} (name, email) VALUES (%s, %s)", ("John Doe", "john@example.com") + ) + + # Verify data + users_result = driver.execute(f"SELECT * FROM {users_table}") + assert len(users_result.data) == 1 + assert users_result.data[0]["name"] == "John Doe" + assert users_result.data[0]["email"] == "john@example.com" + + # 5. Downgrade migration + commands.downgrade("base") + + # 6. Verify table was dropped + with config.provide_session() as driver: + result = driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = '{users_table}'" + ) + assert len(result.data) == 0 + finally: + # Ensure pool is closed + if config.pool_instance: + config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_psycopg_async_migration_full_workflow(postgres_service: PostgresService) -> None: + """Test full Psycopg async migration workflow: init -> create -> upgrade -> downgrade.""" + # Generate unique table names for this test + test_id = "psycopg_async_full" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + # Create Psycopg async config with migration directory + try: + from sqlspec.adapters.psycopg.config import PsycopgAsyncConfig + except ImportError: + pytest.skip("PsycopgAsyncConfig not available") + + config = PsycopgAsyncConfig( + pool_config={ + "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}" + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = AsyncMigrationCommands(config) + + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # Verify initialization + assert migration_dir.exists() + assert (migration_dir / "__init__.py").exists() + + # 2. Create a migration with simple schema + migration_content = f'''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + email VARCHAR(255) UNIQUE NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS {users_table}"] +''' + + # Write migration file + migration_file = migration_dir / "0001_create_users.py" + migration_file.write_text(migration_content) + + try: + # 3. Apply migration (upgrade) + await commands.upgrade() + + # 4.
Verify migration was applied + async with config.provide_session() as driver: + # Check that table exists + result = await driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = '{users_table}'" + ) + assert len(result.data) == 1 + + # Insert test data + await driver.execute( + f"INSERT INTO {users_table} (name, email) VALUES (%s, %s)", ("John Doe", "john@example.com") + ) + + # Verify data + users_result = await driver.execute(f"SELECT * FROM {users_table}") + assert len(users_result.data) == 1 + assert users_result.data[0]["name"] == "John Doe" + assert users_result.data[0]["email"] == "john@example.com" + + # 5. Downgrade migration + await commands.downgrade("base") + + # 6. Verify table was dropped + async with config.provide_session() as driver: + result = await driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = '{users_table}'" + ) + assert len(result.data) == 0 + finally: + # Ensure pool is closed + if config.pool_instance: + import asyncio + + try: + asyncio.get_running_loop() + import concurrent.futures + + with concurrent.futures.ThreadPoolExecutor() as executor: + future = executor.submit(asyncio.run, config.close_pool()) + future.result() + except RuntimeError: + asyncio.run(config.close_pool()) + + +@pytest.mark.xdist_group("migrations") +def test_psycopg_sync_multiple_migrations_workflow(postgres_service: PostgresService) -> None: + """Test Psycopg sync workflow with multiple migrations: create -> apply both -> downgrade one -> downgrade all.""" + # Generate unique table names for this test + test_id = "psycopg_sync_multi" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + posts_table = f"posts_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + config = PsycopgSyncConfig( + pool_config={ + "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}" + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = MigrationCommands(config) + + # 1. Initialize migrations + commands.init(str(migration_dir), package=True) + + # 2. Create first migration + migration1_content = f'''"""Create users table.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + email VARCHAR(255) UNIQUE NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS {users_table}"] +''' + (migration_dir / "0001_create_users.py").write_text(migration1_content) + + # 3. Create second migration + migration2_content = f'''"""Create posts table.""" + + +def up(): + """Create posts table.""" + return [""" + CREATE TABLE {posts_table} ( + id SERIAL PRIMARY KEY, + title VARCHAR(255) NOT NULL, + content TEXT, + user_id INTEGER REFERENCES {users_table}(id), + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """] + + +def down(): + """Drop posts table.""" + return ["DROP TABLE IF EXISTS {posts_table}"] +''' + (migration_dir / "0002_create_posts.py").write_text(migration2_content) + + try: + # 4. Apply all migrations + commands.upgrade() + + # 5. 
Verify both tables exist + with config.provide_session() as driver: + users_result = driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = '{users_table}'" + ) + posts_result = driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = '{posts_table}'" + ) + assert len(users_result.data) == 1 + assert len(posts_result.data) == 1 + + # 6. Downgrade to version 0001 (should remove posts table) + commands.downgrade("0001") + + # 7. Verify only users table remains + with config.provide_session() as driver: + users_result = driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = '{users_table}'" + ) + posts_result = driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = '{posts_table}'" + ) + assert len(users_result.data) == 1 + assert len(posts_result.data) == 0 + + # 8. Downgrade to base + commands.downgrade("base") + + # 9. Verify all tables are gone + with config.provide_session() as driver: + users_result = driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name IN ('{users_table}', '{posts_table}')" + ) + assert len(users_result.data) == 0 + finally: + if config.pool_instance: + config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_psycopg_async_multiple_migrations_workflow(postgres_service: PostgresService) -> None: + """Test Psycopg async workflow with multiple migrations: create -> apply both -> downgrade one -> downgrade all.""" + # Generate unique table names for this test + test_id = "psycopg_async_multi" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + posts_table = f"posts_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + # Create Psycopg async config with migration directory + try: + from sqlspec.adapters.psycopg.config import PsycopgAsyncConfig + except ImportError: + pytest.skip("PsycopgAsyncConfig not available") + + config = PsycopgAsyncConfig( + pool_config={ + "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}" + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = AsyncMigrationCommands(config) + + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # 2. Create first migration + migration1_content = f'''"""Create users table.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + email VARCHAR(255) UNIQUE NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS {users_table}"] +''' + (migration_dir / "0001_create_users.py").write_text(migration1_content) + + # 3. 
Create second migration + migration2_content = f'''"""Create posts table.""" + + +def up(): + """Create posts table.""" + return [""" + CREATE TABLE {posts_table} ( + id SERIAL PRIMARY KEY, + title VARCHAR(255) NOT NULL, + content TEXT, + user_id INTEGER REFERENCES {users_table}(id), + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """] + + +def down(): + """Drop posts table.""" + return ["DROP TABLE IF EXISTS {posts_table}"] +''' + (migration_dir / "0002_create_posts.py").write_text(migration2_content) + + try: + # 4. Apply all migrations + await commands.upgrade() + + # 5. Verify both tables exist + async with config.provide_session() as driver: + users_result = await driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = '{users_table}'" + ) + posts_result = await driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = '{posts_table}'" + ) + assert len(users_result.data) == 1 + assert len(posts_result.data) == 1 + + # 6. Downgrade to version 0001 (should remove posts table) + await commands.downgrade("0001") + + # 7. Verify only users table remains + async with config.provide_session() as driver: + users_result = await driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = '{users_table}'" + ) + posts_result = await driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = '{posts_table}'" + ) + assert len(users_result.data) == 1 + assert len(posts_result.data) == 0 + + # 8. Downgrade to base + await commands.downgrade("base") + + # 9. Verify all tables are gone + async with config.provide_session() as driver: + users_result = await driver.execute( + f"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name IN ('{users_table}', '{posts_table}')" + ) + assert len(users_result.data) == 0 + finally: + if config.pool_instance: + import asyncio + + try: + asyncio.get_running_loop() + import concurrent.futures + + with concurrent.futures.ThreadPoolExecutor() as executor: + future = executor.submit(asyncio.run, config.close_pool()) + future.result() + except RuntimeError: + asyncio.run(config.close_pool()) + + +@pytest.mark.xdist_group("migrations") +def test_psycopg_sync_migration_current_command(postgres_service: PostgresService) -> None: + """Test the current migration command shows correct version for Psycopg sync.""" + # Generate unique table names for this test + test_id = "psycopg_sync_current" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + config = PsycopgSyncConfig( + pool_config={ + "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}" + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = MigrationCommands(config) + + try: + # 1. Initialize migrations + commands.init(str(migration_dir), package=True) + + # 2. Initially no current version + current_version = commands.current() + assert current_version is None or current_version == "base" + + # 3. 
Create a migration + migration_content = f'''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS {users_table}"] +''' + (migration_dir / "0001_create_users.py").write_text(migration_content) + + # 4. Apply migration + commands.upgrade() + + # 5. Check current version is now 0001 + current_version = commands.current() + assert current_version == "0001" + + # 6. Downgrade + commands.downgrade("base") + + # 7. Check current version is back to base/None + current_version = commands.current() + assert current_version is None or current_version == "base" + finally: + if config.pool_instance: + config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_psycopg_async_migration_current_command(postgres_service: PostgresService) -> None: + """Test the current migration command shows correct version for Psycopg async.""" + # Generate unique table names for this test + test_id = "psycopg_async_current" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + try: + from sqlspec.adapters.psycopg.config import PsycopgAsyncConfig + except ImportError: + pytest.skip("PsycopgAsyncConfig not available") + + config = PsycopgAsyncConfig( + pool_config={ + "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}" + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = AsyncMigrationCommands(config) + + try: + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # 2. Initially no current version + current_version = await commands.current() + assert current_version is None or current_version == "base" + + # 3. Create a migration + migration_content = f'''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS {users_table}"] +''' + (migration_dir / "0001_create_users.py").write_text(migration_content) + + # 4. Apply migration + await commands.upgrade() + + # 5. Check current version is now 0001 + current_version = await commands.current() + assert current_version == "0001" + + # 6. Downgrade + await commands.downgrade("base") + + # 7. 
Check current version is back to base/None + current_version = await commands.current() + assert current_version is None or current_version == "base" + finally: + if config.pool_instance: + import asyncio + + try: + asyncio.get_running_loop() + import concurrent.futures + + with concurrent.futures.ThreadPoolExecutor() as executor: + future = executor.submit(asyncio.run, config.close_pool()) + future.result() + except RuntimeError: + asyncio.run(config.close_pool()) + + +@pytest.mark.xdist_group("migrations") +def test_psycopg_sync_migration_error_handling(postgres_service: PostgresService) -> None: + """Test Psycopg sync migration error handling.""" + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + config = PsycopgSyncConfig( + pool_config={ + "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}" + }, + migration_config={ + "script_location": str(migration_dir), + "version_table_name": "sqlspec_migrations_psycopg_sync_error", + }, + ) + commands = MigrationCommands(config) + + try: + # 1. Initialize migrations + commands.init(str(migration_dir), package=True) + + # 2. Create a migration with invalid SQL + migration_content = '''"""Migration with invalid SQL.""" + + +def up(): + """Create table with invalid SQL.""" + return ["CREATE INVALID SQL STATEMENT"] + + +def down(): + """Drop table.""" + return ["DROP TABLE IF EXISTS invalid_table"] +''' + (migration_dir / "0001_invalid.py").write_text(migration_content) + + # 3. Try to apply migration - should raise an error + with pytest.raises(Exception): + commands.upgrade() + + # 4. Verify no migration was recorded due to error + with config.provide_session() as driver: + # Check migration tracking table exists but is empty + try: + result = driver.execute("SELECT COUNT(*) as count FROM sqlspec_migrations_psycopg_sync_error") + assert result.data[0]["count"] == 0 + except Exception: + # If table doesn't exist, that's also acceptable + pass + finally: + if config.pool_instance: + config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_psycopg_async_migration_error_handling(postgres_service: PostgresService) -> None: + """Test Psycopg async migration error handling.""" + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + try: + from sqlspec.adapters.psycopg.config import PsycopgAsyncConfig + except ImportError: + pytest.skip("PsycopgAsyncConfig not available") + + config = PsycopgAsyncConfig( + pool_config={ + "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}" + }, + migration_config={ + "script_location": str(migration_dir), + "version_table_name": "sqlspec_migrations_psycopg_async_error", + }, + ) + commands = AsyncMigrationCommands(config) + + try: + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # 2. Create a migration with invalid SQL + migration_content = '''"""Migration with invalid SQL.""" + + +def up(): + """Create table with invalid SQL.""" + return ["CREATE INVALID SQL STATEMENT"] + + +def down(): + """Drop table.""" + return ["DROP TABLE IF EXISTS invalid_table"] +''' + (migration_dir / "0001_invalid.py").write_text(migration_content) + + # 3. Try to apply migration - should raise an error + with pytest.raises(Exception): + await commands.upgrade() + + # 4. 
Verify no migration was recorded due to error + async with config.provide_session() as driver: + # Check migration tracking table exists but is empty + try: + result = await driver.execute( + "SELECT COUNT(*) as count FROM sqlspec_migrations_psycopg_async_error" + ) + assert result.data[0]["count"] == 0 + except Exception: + # If table doesn't exist, that's also acceptable + pass + finally: + if config.pool_instance: + import asyncio + + try: + asyncio.get_running_loop() + import concurrent.futures + + with concurrent.futures.ThreadPoolExecutor() as executor: + future = executor.submit(asyncio.run, config.close_pool()) + future.result() + except RuntimeError: + asyncio.run(config.close_pool()) + + +@pytest.mark.xdist_group("migrations") +def test_psycopg_sync_migration_with_transactions(postgres_service: PostgresService) -> None: + """Test Psycopg sync migrations work properly with transactions.""" + # Generate unique table names for this test + test_id = "psycopg_sync_trans" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + config = PsycopgSyncConfig( + pool_config={ + "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}" + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = MigrationCommands(config) + + try: + # 1. Initialize migrations + commands.init(str(migration_dir), package=True) + + # 2. Create a migration + migration_content = f'''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + email VARCHAR(255) UNIQUE NOT NULL + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS {users_table}"] +''' + (migration_dir / "0001_create_users.py").write_text(migration_content) + + # 3. Apply migration + commands.upgrade() + + # 4. Test transaction behavior with the session + with config.provide_session() as driver: + # Start manual transaction + driver.begin() + try: + # Insert data within transaction + driver.execute( + f"INSERT INTO {users_table} (name, email) VALUES (%s, %s)", + ("Transaction User", "trans@example.com"), + ) + + # Verify data exists within transaction + result = driver.execute(f"SELECT * FROM {users_table} WHERE name = 'Transaction User'") + assert len(result.data) == 1 + driver.commit() + except Exception: + driver.rollback() + raise + + # Transaction should be committed - verify data persists + result = driver.execute(f"SELECT * FROM {users_table} WHERE name = 'Transaction User'") + assert len(result.data) == 1 + + # 5. 
Test transaction rollback + with config.provide_session() as driver: + driver.begin() + try: + driver.execute( + f"INSERT INTO {users_table} (name, email) VALUES (%s, %s)", + ("Rollback User", "rollback@example.com"), + ) + # Force an error to trigger rollback + raise Exception("Intentional rollback") + except Exception: + driver.rollback() + + # Verify rollback - data should not exist + result = driver.execute(f"SELECT * FROM {users_table} WHERE name = 'Rollback User'") + assert len(result.data) == 0 + finally: + if config.pool_instance: + config.close_pool() + + +@pytest.mark.xdist_group("migrations") +async def test_psycopg_async_migration_with_transactions(postgres_service: PostgresService) -> None: + """Test Psycopg async migrations work properly with transactions.""" + # Generate unique table names for this test + test_id = "psycopg_async_trans" + migration_table = f"sqlspec_migrations_{test_id}" + users_table = f"users_{test_id}" + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + try: + from sqlspec.adapters.psycopg.config import PsycopgAsyncConfig + except ImportError: + pytest.skip("PsycopgAsyncConfig not available") + + config = PsycopgAsyncConfig( + pool_config={ + "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}" + }, + migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, + ) + commands = AsyncMigrationCommands(config) + + try: + # 1. Initialize migrations + await commands.init(str(migration_dir), package=True) + + # 2. Create a migration + migration_content = f'''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE {users_table} ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + email VARCHAR(255) UNIQUE NOT NULL + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS {users_table}"] +''' + (migration_dir / "0001_create_users.py").write_text(migration_content) + + # 3. Apply migration + await commands.upgrade() + + # 4. Test transaction behavior with the session + async with config.provide_session() as driver: + # Start manual transaction + await driver.begin() + try: + # Insert data within transaction + await driver.execute( + f"INSERT INTO {users_table} (name, email) VALUES (%s, %s)", + ("Transaction User", "trans@example.com"), + ) + + # Verify data exists within transaction + result = await driver.execute(f"SELECT * FROM {users_table} WHERE name = 'Transaction User'") + assert len(result.data) == 1 + await driver.commit() + except Exception: + await driver.rollback() + raise + + # Transaction should be committed - verify data persists + result = await driver.execute(f"SELECT * FROM {users_table} WHERE name = 'Transaction User'") + assert len(result.data) == 1 + + # 5. 
Test transaction rollback + async with config.provide_session() as driver: + await driver.begin() + try: + await driver.execute( + f"INSERT INTO {users_table} (name, email) VALUES (%s, %s)", + ("Rollback User", "rollback@example.com"), + ) + # Force an error to trigger rollback + raise Exception("Intentional rollback") + except Exception: + await driver.rollback() + + # Verify rollback - data should not exist + result = await driver.execute(f"SELECT * FROM {users_table} WHERE name = 'Rollback User'") + assert len(result.data) == 0 + finally: + if config.pool_instance: + import asyncio + + try: + asyncio.get_running_loop() + import concurrent.futures + + with concurrent.futures.ThreadPoolExecutor() as executor: + future = executor.submit(asyncio.run, config.close_pool()) + future.result() + except RuntimeError: + asyncio.run(config.close_pool()) diff --git a/tests/integration/test_adapters/test_sqlite/test_migrations.py b/tests/integration/test_adapters/test_sqlite/test_migrations.py new file mode 100644 index 000000000..e5a40b5bd --- /dev/null +++ b/tests/integration/test_adapters/test_sqlite/test_migrations.py @@ -0,0 +1,320 @@ +"""Integration tests for SQLite migration workflow.""" + +import tempfile +from pathlib import Path + +import pytest + +from sqlspec.adapters.sqlite.config import SqliteConfig +from sqlspec.migrations.commands import MigrationCommands + + +@pytest.mark.xdist_group("migrations") +def test_sqlite_migration_full_workflow() -> None: + """Test full SQLite migration workflow: init -> create -> upgrade -> downgrade.""" + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + # Create SQLite config with migration directory + config = SqliteConfig( + pool_config={"database": ":memory:"}, + migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, + ) + commands = MigrationCommands(config) + + # 1. Initialize migrations + commands.init(str(migration_dir), package=True) + + # Verify initialization + assert migration_dir.exists() + assert (migration_dir / "__init__.py").exists() + + # 2. Create a migration with simple schema + migration_content = '''"""Initial schema migration.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE users ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL, + email TEXT UNIQUE NOT NULL, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS users"] +''' + + # Write migration file + migration_file = migration_dir / "001_create_users.py" + migration_file.write_text(migration_content) + + # 3. Apply migration (upgrade) + commands.upgrade() + + # 4. Verify migration was applied + with config.provide_session() as driver: + # Check that table exists + result = driver.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='users'") + assert len(result.data) == 1 + + # Insert test data + driver.execute("INSERT INTO users (name, email) VALUES (?, ?)", ("John Doe", "john@example.com")) + + # Verify data + users_result = driver.execute("SELECT * FROM users") + assert len(users_result.data) == 1 + assert users_result.data[0]["name"] == "John Doe" + assert users_result.data[0]["email"] == "john@example.com" + + # 5. Downgrade migration + commands.downgrade("base") + + # 6. 
Verify table was dropped + with config.provide_session() as driver: + result = driver.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='users'") + assert len(result.data) == 0 + + +@pytest.mark.xdist_group("migrations") +def test_sqlite_multiple_migrations_workflow() -> None: + """Test SQLite workflow with multiple migrations: create -> apply both -> downgrade one -> downgrade all.""" + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + # Create SQLite config with migration directory + config = SqliteConfig( + pool_config={"database": ":memory:"}, + migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, + ) + commands = MigrationCommands(config) + + # Initialize migrations + commands.init(str(migration_dir), package=True) + + # First migration - create users table + migration1_content = '''"""Create users table.""" + + +def up(): + """Create users table.""" + return [""" + CREATE TABLE users ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL, + email TEXT UNIQUE NOT NULL + ) + """] + + +def down(): + """Drop users table.""" + return ["DROP TABLE IF EXISTS users"] +''' + + # Second migration - create posts table + migration2_content = '''"""Create posts table.""" + + +def up(): + """Create posts table.""" + return [""" + CREATE TABLE posts ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + title TEXT NOT NULL, + content TEXT, + user_id INTEGER, + FOREIGN KEY (user_id) REFERENCES users (id) + ) + """] + + +def down(): + """Drop posts table.""" + return ["DROP TABLE IF EXISTS posts"] +''' + + # Write migration files + (migration_dir / "0001_create_users.py").write_text(migration1_content) + (migration_dir / "0002_create_posts.py").write_text(migration2_content) + + # Apply all migrations + commands.upgrade() + + # Verify both tables exist + with config.provide_session() as driver: + tables_result = driver.execute("SELECT name FROM sqlite_master WHERE type='table' ORDER BY name") + table_names = [t["name"] for t in tables_result.data] + assert "users" in table_names + assert "posts" in table_names + + # Test the relationship + driver.execute("INSERT INTO users (name, email) VALUES (?, ?)", ("Author", "author@example.com")) + driver.execute( + "INSERT INTO posts (title, content, user_id) VALUES (?, ?, ?)", ("My Post", "Post content", 1) + ) + + posts_result = driver.execute("SELECT * FROM posts") + assert len(posts_result.data) == 1 + assert posts_result.data[0]["title"] == "My Post" + + # Downgrade to revision 0001 (should drop posts table) + commands.downgrade("0001") + + with config.provide_session() as driver: + tables_result = driver.execute("SELECT name FROM sqlite_master WHERE type='table'") + table_names = [t["name"] for t in tables_result.data] + assert "users" in table_names + assert "posts" not in table_names + + # Downgrade to base (should drop all tables) + commands.downgrade("base") + + with config.provide_session() as driver: + tables_result = driver.execute( + "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'" + ) + # Should only have migration tracking table remaining + table_names = [t["name"] for t in tables_result.data if not t["name"].startswith("sqlspec_")] + assert len(table_names) == 0 + + +@pytest.mark.xdist_group("migrations") +def test_sqlite_migration_current_command() -> None: + """Test the current migration command shows correct version for SQLite.""" + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = 
Path(temp_dir) / "migrations" + + # Create SQLite config with migration directory + config = SqliteConfig( + pool_config={"database": ":memory:"}, + migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, + ) + commands = MigrationCommands(config) + + # Initialize migrations + commands.init(str(migration_dir), package=True) + + # Should show no current version initially + commands.current(verbose=False) # This just outputs to console + + # Create and apply a migration + migration_content = '''"""Test migration.""" + + +def up(): + """Create test table.""" + return ["CREATE TABLE test_table (id INTEGER PRIMARY KEY)"] + + +def down(): + """Drop test table.""" + return ["DROP TABLE IF EXISTS test_table"] +''' + + (migration_dir / "001_test.py").write_text(migration_content) + + # Apply migration + commands.upgrade() + + # Check current version (this just outputs, can't assert return value) + commands.current(verbose=True) + + +@pytest.mark.xdist_group("migrations") +def test_sqlite_migration_error_handling() -> None: + """Test SQLite migration error handling.""" + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + # Create SQLite config with migration directory + config = SqliteConfig( + pool_config={"database": ":memory:"}, + migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, + ) + commands = MigrationCommands(config) + + # Initialize migrations + commands.init(str(migration_dir), package=True) + + # Create a migration with syntax error + migration_content = '''"""Bad migration.""" + + +def up(): + """Invalid SQL - should cause error.""" + return ["CREATE THAT TABLE invalid_sql"] + + +def down(): + """No downgrade needed.""" + return [] +''' + + (migration_dir / "001_bad.py").write_text(migration_content) + + # Attempting to upgrade should raise an error + with pytest.raises(Exception): # Will be wrapped in some migration exception + commands.upgrade() + + +@pytest.mark.xdist_group("migrations") +def test_sqlite_migration_with_transactions() -> None: + """Test SQLite migrations work properly with transactions.""" + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + # Create SQLite config with migration directory + config = SqliteConfig( + pool_config={"database": ":memory:"}, + migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, + ) + commands = MigrationCommands(config) + + # Initialize migrations + commands.init(str(migration_dir), package=True) + + # Create a migration that uses transactions + migration_content = '''"""Migration with multiple operations.""" + + +def up(): + """Create customers table with data.""" + return [ + """CREATE TABLE customers ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL + )""", + "INSERT INTO customers (name) VALUES ('Customer 1')", + "INSERT INTO customers (name) VALUES ('Customer 2')" + ] + + +def down(): + """Drop customers table.""" + return ["DROP TABLE IF EXISTS customers"] +''' + + (migration_dir / "0001_transaction_test.py").write_text(migration_content) + + # Apply migration + commands.upgrade() + + # Verify both table and data exist + with config.provide_session() as driver: + customers_result = driver.execute("SELECT * FROM customers ORDER BY name") + assert len(customers_result.data) == 2 + assert customers_result.data[0]["name"] == "Customer 1" + assert customers_result.data[1]["name"] == 
"Customer 2" + + # Downgrade should remove everything + commands.downgrade("base") + + with config.provide_session() as driver: + result = driver.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='customers'") + assert len(result.data) == 0 diff --git a/tests/integration/test_migrations/test_migration_execution.py b/tests/integration/test_migrations/test_migration_execution.py deleted file mode 100644 index 29870dd08..000000000 --- a/tests/integration/test_migrations/test_migration_execution.py +++ /dev/null @@ -1,660 +0,0 @@ -"""Integration tests for migration execution. - -Tests real migration execution including: -- Migration tracking table creation and management -- Upgrade and downgrade execution with real databases -- Migration state tracking and version management -- Transaction handling and rollback scenarios -- Cross-database compatibility - -Uses CORE_ROUND_3 architecture with real database operations. -""" - -from __future__ import annotations - -import tempfile -from collections.abc import Generator -from pathlib import Path -from typing import Any -from unittest.mock import Mock, patch - -import pytest - -from sqlspec.core.statement import SQL -from sqlspec.driver import ExecutionResult -from sqlspec.migrations.base import BaseMigrationRunner, BaseMigrationTracker - - -@pytest.fixture -def temp_workspace() -> Generator[Path, None, None]: - """Create a temporary workspace for migration tests.""" - with tempfile.TemporaryDirectory() as temp_dir: - workspace = Path(temp_dir) - yield workspace - - -@pytest.fixture -def temp_workspace_with_migrations(tmp_path: Path) -> Path: - """Create a temporary workspace with migrations directory for tests.""" - # Create migrations directory in pytest's tmp_path - migrations_dir = tmp_path / "migrations" - migrations_dir.mkdir() - - return tmp_path - - -class MockMigrationTracker(BaseMigrationTracker): - """Mock migration tracker for testing.""" - - def __init__(self, version_table_name: str = "test_migrations") -> None: - super().__init__(version_table_name) - self._applied_migrations: dict[str, dict[str, Any]] = {} - - def ensure_tracking_table(self, driver: Any) -> None: - """Mock ensure tracking table.""" - pass - - def get_current_version(self, driver: Any) -> str | None: - """Mock get current version.""" - if not self._applied_migrations: - return None - return max(self._applied_migrations.keys()) - - def get_applied_migrations(self, driver: Any) -> list[dict[str, Any]]: - """Mock get applied migrations.""" - return list(self._applied_migrations.values()) - - def record_migration( - self, driver: Any, version: str, description: str, execution_time_ms: int, checksum: str - ) -> None: - """Mock record migration.""" - self._applied_migrations[version] = { - "version_num": version, - "description": description, - "execution_time_ms": execution_time_ms, - "checksum": checksum, - } - - def remove_migration(self, driver: Any, version: str) -> None: - """Mock remove migration.""" - if version in self._applied_migrations: - del self._applied_migrations[version] - - -class MockMigrationRunner(BaseMigrationRunner): - """Mock migration runner for testing.""" - - def __init__(self, migrations_path: Path) -> None: - super().__init__(migrations_path) - self._executed_migrations: list[dict[str, Any]] = [] - - def get_migration_files(self) -> list[tuple[str, Path]]: - """Mock get migration files.""" - return self._get_migration_files_sync() - - def load_migration(self, file_path: Path) -> dict[str, Any]: - """Mock load migration.""" - 
return self._load_migration_metadata(file_path) - - def execute_upgrade(self, driver: Any, migration: dict[str, Any]) -> ExecutionResult: - """Mock execute upgrade.""" - sql = self._get_migration_sql(migration, "up") - if sql: - # Simulate execution - self._executed_migrations.append({"version": migration["version"], "direction": "up", "sql": sql}) - return Mock(spec=ExecutionResult) - raise ValueError(f"No upgrade SQL for migration {migration['version']}") - - def execute_downgrade(self, driver: Any, migration: dict[str, Any]) -> ExecutionResult: - """Mock execute downgrade.""" - sql = self._get_migration_sql(migration, "down") - if sql: - # Simulate execution - self._executed_migrations.append({"version": migration["version"], "direction": "down", "sql": sql}) - return Mock(spec=ExecutionResult) - return Mock(spec=ExecutionResult) # Return mock even if no SQL (warning case) - - def load_all_migrations(self) -> None: - """Mock load all migrations.""" - pass - - def get_executed_migrations(self) -> list[dict[str, Any]]: - """Get executed migrations for testing.""" - return self._executed_migrations - - -def test_tracking_table_sql_generation() -> None: - """Test migration tracking table SQL generation.""" - tracker = MockMigrationTracker("test_migrations") - - create_sql = tracker._get_create_table_sql() - - assert isinstance(create_sql, SQL) - assert "CREATE TABLE" in create_sql.sql.upper() - assert "test_migrations" in create_sql.sql - assert "version_num" in create_sql.sql - assert "description" in create_sql.sql - assert "applied_at" in create_sql.sql - assert "execution_time_ms" in create_sql.sql - assert "checksum" in create_sql.sql - assert "applied_by" in create_sql.sql - - -def test_current_version_sql_generation() -> None: - """Test current version query SQL generation.""" - tracker = MockMigrationTracker("test_migrations") - - version_sql = tracker._get_current_version_sql() - - assert isinstance(version_sql, SQL) - assert "SELECT" in version_sql.sql.upper() - assert "version_num" in version_sql.sql - assert "test_migrations" in version_sql.sql - assert "ORDER BY" in version_sql.sql.upper() - assert "LIMIT" in version_sql.sql.upper() - - -def test_applied_migrations_sql_generation() -> None: - """Test applied migrations query SQL generation.""" - tracker = MockMigrationTracker("test_migrations") - - applied_sql = tracker._get_applied_migrations_sql() - - assert isinstance(applied_sql, SQL) - assert "SELECT" in applied_sql.sql.upper() - assert "*" in applied_sql.sql - assert "test_migrations" in applied_sql.sql.lower() - assert "ORDER BY" in applied_sql.sql.upper() - assert "version_num" in applied_sql.sql.lower() - - -def test_record_migration_sql_generation() -> None: - """Test migration recording SQL generation.""" - tracker = MockMigrationTracker("test_migrations") - - record_sql = tracker._get_record_migration_sql( - version="0001", description="test migration", execution_time_ms=250, checksum="abc123", applied_by="test_user" - ) - - assert isinstance(record_sql, SQL) - assert "INSERT INTO" in record_sql.sql.upper() - assert "test_migrations" in record_sql.sql - assert "VALUES" in record_sql.sql.upper() - - # Check parameters are set - params = record_sql.parameters - assert "0001" in str(params) or "0001" in record_sql.sql - assert "test migration" in str(params) or "test migration" in record_sql.sql - - -def test_remove_migration_sql_generation() -> None: - """Test migration removal SQL generation.""" - tracker = MockMigrationTracker("test_migrations") - - remove_sql = 
tracker._get_remove_migration_sql("0001") - - assert isinstance(remove_sql, SQL) - assert "DELETE" in remove_sql.sql.upper() - assert "test_migrations" in remove_sql.sql - assert "WHERE" in remove_sql.sql.upper() - assert "version_num" in remove_sql.sql - - -def test_single_migration_upgrade_execution(temp_workspace_with_migrations: Path) -> None: - """Test execution of a single migration upgrade.""" - migrations_dir = temp_workspace_with_migrations / "migrations" - - # Create migration file - migration_file = migrations_dir / "0001_create_users.sql" - migration_content = """ --- name: migrate-0001-up -CREATE TABLE users ( - id INTEGER PRIMARY KEY, - name TEXT NOT NULL, - email TEXT UNIQUE NOT NULL -); - --- name: migrate-0001-down -DROP TABLE users; -""" - migration_file.write_text(migration_content) - - runner = MockMigrationRunner(migrations_dir) - MockMigrationTracker() - mock_driver = Mock() - - # Load migration metadata - with patch("sqlspec.migrations.base.get_migration_loader") as mock_get_loader: - mock_loader = Mock() - mock_loader.validate_migration_file = Mock() - mock_get_loader.return_value = mock_loader - - runner.loader.clear_cache = Mock() - runner.loader.load_sql = Mock() - runner.loader.has_query = Mock(return_value=True) - - migration = runner.load_migration(migration_file) - - # Execute upgrade - with patch("sqlspec.migrations.base.run_") as mock_run: - # run_ should return a callable that returns the SQL statements - mock_run.return_value = lambda file_path: [ - "CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT NOT NULL, email TEXT UNIQUE NOT NULL);" - ] - - result = runner.execute_upgrade(mock_driver, migration) - - assert result is not None - - # Verify execution was recorded - executed = runner.get_executed_migrations() - assert len(executed) == 1 - assert executed[0]["version"] == "0001" - assert executed[0]["direction"] == "up" - - -def test_single_migration_downgrade_execution(temp_workspace_with_migrations: Path) -> None: - """Test execution of a single migration downgrade.""" - migrations_dir = temp_workspace_with_migrations / "migrations" - - # Create migration file - migration_file = migrations_dir / "0001_create_users.sql" - migration_content = """ --- name: migrate-0001-up -CREATE TABLE users ( - id INTEGER PRIMARY KEY, - name TEXT NOT NULL, - email TEXT UNIQUE NOT NULL -); - --- name: migrate-0001-down -DROP TABLE users; -""" - migration_file.write_text(migration_content) - - runner = MockMigrationRunner(migrations_dir) - mock_driver = Mock() - - # Load migration metadata - with patch("sqlspec.migrations.base.get_migration_loader") as mock_get_loader: - mock_loader = Mock() - mock_loader.validate_migration_file = Mock() - mock_get_loader.return_value = mock_loader - - runner.loader.clear_cache = Mock() - runner.loader.load_sql = Mock() - runner.loader.has_query = Mock(return_value=True) - - migration = runner.load_migration(migration_file) - - # Execute downgrade - with patch("sqlspec.migrations.base.run_") as mock_run: - # run_ should return a callable that returns the SQL statements - mock_run.return_value = lambda file_path: ["DROP TABLE users;"] - - result = runner.execute_downgrade(mock_driver, migration) - - assert result is not None - - # Verify execution was recorded - executed = runner.get_executed_migrations() - assert len(executed) == 1 - assert executed[0]["version"] == "0001" - assert executed[0]["direction"] == "down" - - -def test_multiple_migrations_execution_order(temp_workspace_with_migrations: Path) -> None: - """Test execution 
order of multiple migrations.""" - migrations_dir = temp_workspace_with_migrations / "migrations" - - # Create multiple migration files (in non-sequential creation order) - migrations = [ - ("0003_add_indexes.sql", "CREATE INDEX idx_users_email ON users(email);", "DROP INDEX idx_users_email;"), - ( - "0001_create_users.sql", - "CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT, email TEXT);", - "DROP TABLE users;", - ), - ( - "0002_add_products.sql", - "CREATE TABLE products (id INTEGER PRIMARY KEY, name TEXT, price REAL);", - "DROP TABLE products;", - ), - ] - - for filename, up_sql, down_sql in migrations: - migration_file = migrations_dir / filename - version = filename.split("_")[0] - content = f""" --- name: migrate-{version}-up -{up_sql} - --- name: migrate-{version}-down -{down_sql} -""" - migration_file.write_text(content) - - runner = MockMigrationRunner(migrations_dir) - mock_driver = Mock() - - # Get all migration files (should be sorted by version) - migration_files = runner.get_migration_files() - - # Verify correct ordering - assert len(migration_files) == 3 - assert migration_files[0][0] == "0001" # Users first - assert migration_files[1][0] == "0002" # Products second - assert migration_files[2][0] == "0003" # Indexes last - - # Execute all migrations in order - with ( - patch("sqlspec.migrations.base.get_migration_loader") as mock_get_loader, - patch("sqlspec.migrations.base.run_") as mock_run, - ): - mock_loader = Mock() - mock_loader.validate_migration_file = Mock() - mock_get_loader.return_value = mock_loader - - runner.loader.clear_cache = Mock() - runner.loader.load_sql = Mock() - runner.loader.has_query = Mock(return_value=True) - - # Mock different SQL for each migration - sql_statements = [ - "CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT, email TEXT);", - "CREATE TABLE products (id INTEGER PRIMARY KEY, name TEXT, price REAL);", - "CREATE INDEX idx_users_email ON users(email);", - ] - - for i, (version, file_path) in enumerate(migration_files): - # run_ should return a callable that returns the SQL statements - mock_run.return_value = lambda file_path, idx=i: [sql_statements[idx]] - - migration = runner.load_migration(file_path) - result = runner.execute_upgrade(mock_driver, migration) - - assert result is not None - - # Verify execution order - executed = runner.get_executed_migrations() - assert len(executed) == 3 - assert executed[0]["version"] == "0001" - assert executed[1]["version"] == "0002" - assert executed[2]["version"] == "0003" - - -def test_migration_with_no_downgrade(temp_workspace_with_migrations: Path) -> None: - """Test migration execution when no downgrade is available.""" - migrations_dir = temp_workspace_with_migrations / "migrations" - - # Create migration file with only upgrade - migration_file = migrations_dir / "0001_irreversible.sql" - migration_content = """ --- name: migrate-0001-up -CREATE TABLE irreversible_data AS -SELECT DISTINCT column1, column2 FROM legacy_table; -""" - migration_file.write_text(migration_content) - - runner = MockMigrationRunner(migrations_dir) - mock_driver = Mock() - - # Load migration metadata - with patch("sqlspec.migrations.base.get_migration_loader") as mock_get_loader: - mock_loader = Mock() - mock_loader.validate_migration_file = Mock() - mock_get_loader.return_value = mock_loader - - runner.loader.clear_cache = Mock() - runner.loader.load_sql = Mock() - # Only upgrade query exists - runner.loader.has_query = Mock(side_effect=lambda q: q.endswith("-up")) - - migration = 
runner.load_migration(migration_file) - - assert migration["has_upgrade"] is True - assert migration["has_downgrade"] is False - - # Execute upgrade should work - with patch("sqlspec.migrations.base.run_") as mock_run: - # run_ should return a callable that returns the SQL statements - mock_run.return_value = lambda file_path: [ - "CREATE TABLE irreversible_data AS SELECT DISTINCT column1, column2 FROM legacy_table;" - ] - - result = runner.execute_upgrade(mock_driver, migration) - assert result is not None - - # Execute downgrade should handle gracefully - with patch("sqlspec.migrations.base.run_") as mock_run, patch("sqlspec.migrations.base.logger"): - result = runner.execute_downgrade(mock_driver, migration) - - # Should not raise error, but may log warning - assert result is not None # Mock returns something - - -def test_migration_state_recording() -> None: - """Test recording migration state.""" - tracker = MockMigrationTracker() - mock_driver = Mock() - - # Record a migration - tracker.record_migration( - mock_driver, version="0001", description="create users table", execution_time_ms=150, checksum="abc123def456" - ) - - # Verify recording - applied_migrations = tracker.get_applied_migrations(mock_driver) - assert len(applied_migrations) == 1 - - migration = applied_migrations[0] - assert migration["version_num"] == "0001" - assert migration["description"] == "create users table" - assert migration["execution_time_ms"] == 150 - assert migration["checksum"] == "abc123def456" - - -def test_current_version_tracking() -> None: - """Test current version tracking.""" - tracker = MockMigrationTracker() - mock_driver = Mock() - - # Initially no version - assert tracker.get_current_version(mock_driver) is None - - # Record migrations in order - migrations = [ - ("0001", "initial schema", 100, "hash1"), - ("0002", "add users", 150, "hash2"), - ("0003", "add indexes", 75, "hash3"), - ] - - for version, desc, time_ms, checksum in migrations: - tracker.record_migration(mock_driver, version, desc, time_ms, checksum) - - # Current version should be the highest - current = tracker.get_current_version(mock_driver) - assert current == "0003" - - -def test_migration_removal() -> None: - """Test migration removal from tracking.""" - tracker = MockMigrationTracker() - mock_driver = Mock() - - # Record multiple migrations - tracker.record_migration(mock_driver, "0001", "first", 100, "hash1") - tracker.record_migration(mock_driver, "0002", "second", 150, "hash2") - tracker.record_migration(mock_driver, "0003", "third", 75, "hash3") - - assert len(tracker.get_applied_migrations(mock_driver)) == 3 - assert tracker.get_current_version(mock_driver) == "0003" - - # Remove the latest migration - tracker.remove_migration(mock_driver, "0003") - - assert len(tracker.get_applied_migrations(mock_driver)) == 2 - assert tracker.get_current_version(mock_driver) == "0002" - - # Remove a middle migration - tracker.remove_migration(mock_driver, "0001") - - migrations = tracker.get_applied_migrations(mock_driver) - assert len(migrations) == 1 - assert migrations[0]["version_num"] == "0002" - - -def test_applied_migrations_ordering() -> None: - """Test that applied migrations are returned in correct order.""" - tracker = MockMigrationTracker() - mock_driver = Mock() - - # Record migrations out of order - migrations_data = [("0003", "third migration"), ("0001", "first migration"), ("0002", "second migration")] - - for version, desc in migrations_data: - tracker.record_migration(mock_driver, version, desc, 100, 
f"hash_{version}") - - applied = tracker.get_applied_migrations(mock_driver) - - # Should be ordered by version (depends on mock implementation) - # In this mock, they're stored as inserted, but real implementation should sort - assert len(applied) == 3 - - # Verify all migrations are present - versions = [m["version_num"] for m in applied] - assert "0001" in versions - assert "0002" in versions - assert "0003" in versions - - -def test_migration_execution_failure(temp_workspace_with_migrations: Path) -> None: - """Test handling of migration execution failures.""" - migrations_dir = temp_workspace_with_migrations / "migrations" - - # Create migration file with invalid SQL - migration_file = migrations_dir / "0001_broken.sql" - migration_content = """ --- name: migrate-0001-up -INVALID SQL STATEMENT THAT SHOULD FAIL; - --- name: migrate-0001-down -DROP TABLE IF EXISTS nonexistent_table; -""" - migration_file.write_text(migration_content) - - runner = MockMigrationRunner(migrations_dir) - mock_driver = Mock() - - # Load migration metadata - with patch("sqlspec.migrations.base.get_migration_loader") as mock_get_loader: - mock_loader = Mock() - mock_loader.validate_migration_file = Mock() - mock_get_loader.return_value = mock_loader - - runner.loader.clear_cache = Mock() - runner.loader.load_sql = Mock() - runner.loader.has_query = Mock(return_value=True) - - migration = runner.load_migration(migration_file) - - # Mock run_ to raise exception for invalid SQL - with patch("sqlspec.migrations.base.run_") as mock_run: - mock_run.side_effect = Exception("SQL syntax error") - - with pytest.raises(ValueError) as exc_info: - runner.execute_upgrade(mock_driver, migration) - - assert "Failed to load upgrade for migration 0001" in str(exc_info.value) - - -def test_missing_upgrade_migration(temp_workspace_with_migrations: Path) -> None: - """Test handling of missing upgrade migrations.""" - migrations_dir = temp_workspace_with_migrations / "migrations" - - # Create migration file with only downgrade - migration_file = migrations_dir / "0001_downgrade_only.sql" - migration_content = """ --- name: migrate-0001-down -DROP TABLE legacy_table; -""" - migration_file.write_text(migration_content) - - runner = MockMigrationRunner(migrations_dir) - mock_driver = Mock() - - # Load migration metadata - with patch("sqlspec.migrations.base.get_migration_loader") as mock_get_loader: - mock_loader = Mock() - mock_loader.validate_migration_file = Mock() - mock_get_loader.return_value = mock_loader - - runner.loader.clear_cache = Mock() - runner.loader.load_sql = Mock() - # Only downgrade query exists - runner.loader.has_query = Mock(side_effect=lambda q: q.endswith("-down")) - - migration = runner.load_migration(migration_file) - - assert migration["has_upgrade"] is False - assert migration["has_downgrade"] is True - - # Attempt to execute upgrade should raise error - with pytest.raises(ValueError) as exc_info: - runner.execute_upgrade(mock_driver, migration) - - assert "has no upgrade query" in str(exc_info.value) - - -def test_corrupted_migration_file(temp_workspace_with_migrations: Path) -> None: - """Test handling of corrupted migration files.""" - migrations_dir = temp_workspace_with_migrations / "migrations" - - # Create corrupted migration file - migration_file = migrations_dir / "0001_corrupted.sql" - migration_content = """ -This is not a valid migration file format. -It has no proper named query structure. 
--- name: incomplete -SELECT * FROM -""" - migration_file.write_text(migration_content) - - runner = MockMigrationRunner(migrations_dir) - - # Loading should handle corruption gracefully - with patch("sqlspec.migrations.base.get_migration_loader") as mock_get_loader: - mock_loader = Mock() - mock_loader.validate_migration_file.side_effect = Exception("File validation failed") - mock_get_loader.return_value = mock_loader - - with pytest.raises(Exception): - runner.load_migration(migration_file) - - -def test_duplicate_version_detection(temp_workspace_with_migrations: Path) -> None: - """Test detection of duplicate migration versions.""" - migrations_dir = temp_workspace_with_migrations / "migrations" - - # Create two files with same version - file1 = migrations_dir / "0001_first.sql" - file1.write_text(""" --- name: migrate-0001-up -CREATE TABLE first (id INTEGER); -""") - - file2 = migrations_dir / "0001_second.sql" - file2.write_text(""" --- name: migrate-0001-up -CREATE TABLE second (id INTEGER); -""") - - runner = MockMigrationRunner(migrations_dir) - - # Getting migration files should find both files with same version - files = runner.get_migration_files() - - # Both files should be found (the runner itself doesn't prevent duplicates) - # The validation logic would be in higher-level migration management - versions = [version for version, _ in files] - assert versions.count("0001") == 2 diff --git a/tests/unit/test_migrations/test_migration.py b/tests/unit/test_migrations/test_migration.py index 532c71835..b4d14e5c3 100644 --- a/tests/unit/test_migrations/test_migration.py +++ b/tests/unit/test_migrations/test_migration.py @@ -19,7 +19,6 @@ import pytest -from sqlspec.core.statement import SQL from sqlspec.migrations.base import BaseMigrationRunner @@ -293,7 +292,7 @@ def down(): with ( patch("sqlspec.migrations.base.get_migration_loader") as mock_get_loader, - patch("sqlspec.migrations.base.run_") as mock_run, + patch("sqlspec.migrations.base.await_") as mock_await, ): mock_loader = Mock() mock_loader.validate_migration_file = Mock() @@ -301,8 +300,8 @@ def down(): mock_loader.get_down_sql = Mock() mock_get_loader.return_value = mock_loader - # Mock run_ to simulate successful down_sql call - mock_run.return_value = Mock(return_value=True) + # Mock await_ to simulate successful down_sql call + mock_await.return_value = Mock(return_value=True) metadata = runner._load_migration_metadata(migration_file) @@ -339,7 +338,7 @@ async def down(): with ( patch("sqlspec.migrations.base.get_migration_loader") as mock_get_loader, - patch("sqlspec.migrations.base.run_") as mock_run, + patch("sqlspec.migrations.base.await_") as mock_await, ): mock_loader = Mock() mock_loader.validate_migration_file = Mock() @@ -347,8 +346,8 @@ async def down(): mock_loader.get_down_sql = Mock() mock_get_loader.return_value = mock_loader - # Mock run_ to simulate successful down_sql call - mock_run.return_value = Mock(return_value=True) + # Mock await_ to simulate successful down_sql call + mock_await.return_value = Mock(return_value=True) metadata = runner._load_migration_metadata(migration_file) @@ -384,7 +383,7 @@ async def down(): with ( patch("sqlspec.migrations.base.get_migration_loader") as mock_get_loader, - patch("sqlspec.migrations.base.run_") as mock_run, + patch("sqlspec.migrations.base.await_") as mock_await, ): mock_loader = Mock() mock_loader.validate_migration_file = Mock() @@ -392,8 +391,8 @@ async def down(): mock_loader.get_down_sql = Mock() mock_get_loader.return_value = mock_loader - # Mock run_ 
to simulate successful down_sql call - mock_run.return_value = Mock(return_value=True) + # Mock await_ to simulate successful down_sql call + mock_await.return_value = Mock(return_value=True) metadata = runner._load_migration_metadata(migration_file) @@ -520,7 +519,7 @@ async def down(): with ( patch("sqlspec.migrations.base.get_migration_loader") as mock_get_loader, - patch("sqlspec.migrations.base.run_") as mock_run, + patch("sqlspec.migrations.base.await_") as mock_await, ): mock_loader = Mock() mock_loader.validate_migration_file = Mock() @@ -528,8 +527,8 @@ async def down(): mock_loader.get_down_sql = Mock() mock_get_loader.return_value = mock_loader - # Mock run_ to simulate successful execution for Python files - mock_run.return_value = Mock(return_value=True) + # Mock await_ to simulate successful execution for Python files + mock_await.return_value = Mock(return_value=True) # Test loading metadata for all migrations all_metadata = [] @@ -643,15 +642,15 @@ def test_get_migration_sql_upgrade() -> None: "loader": Mock(), } - with patch("sqlspec.migrations.base.run_") as mock_run: - # Mock run_ to return a function that returns SQL statements - mock_run.return_value = lambda file_path: ["CREATE TABLE test (id INTEGER);"] + with patch("sqlspec.migrations.base.await_") as mock_await: + # Mock await_ to return a function that returns SQL statements + mock_await.return_value = lambda file_path: ["CREATE TABLE test (id INTEGER);"] result = runner._get_migration_sql(migration, "up") - # Should create SQL object with the statement - assert isinstance(result, SQL) - assert "CREATE TABLE test (id INTEGER);" in result.sql + # Should return list of SQL statements + assert isinstance(result, list) + assert result == ["CREATE TABLE test (id INTEGER);"] def test_get_migration_sql_downgrade() -> None: @@ -666,15 +665,15 @@ def test_get_migration_sql_downgrade() -> None: "loader": Mock(), } - with patch("sqlspec.migrations.base.run_") as mock_run: - # Mock run_ to return a function that returns SQL statements - mock_run.return_value = lambda file_path: ["DROP TABLE test;"] + with patch("sqlspec.migrations.base.await_") as mock_await: + # Mock await_ to return a function that returns SQL statements + mock_await.return_value = lambda file_path: ["DROP TABLE test;"] result = runner._get_migration_sql(migration, "down") - # Should create SQL object with the statement - assert isinstance(result, SQL) - assert "DROP TABLE test;" in result.sql + # Should return list of SQL statements + assert isinstance(result, list) + assert result == ["DROP TABLE test;"] def test_get_migration_sql_no_downgrade() -> None: @@ -727,9 +726,9 @@ def test_get_migration_sql_loader_error() -> None: "loader": Mock(), } - with patch("sqlspec.migrations.base.run_") as mock_run: - # Mock run_ to raise an exception - mock_run.side_effect = Exception("Loader error") + with patch("sqlspec.migrations.base.await_") as mock_await: + # Mock await_ to raise an exception + mock_await.side_effect = Exception("Loader error") # Should raise error for upgrade with pytest.raises(ValueError) as exc_info: @@ -755,9 +754,9 @@ def test_get_migration_sql_empty_statements() -> None: "loader": Mock(), } - with patch("sqlspec.migrations.base.run_") as mock_run: - # Mock run_ to return a function that returns empty list - mock_run.return_value = lambda file_path: [] + with patch("sqlspec.migrations.base.await_") as mock_await: + # Mock await_ to return a function that returns empty list + mock_await.return_value = lambda file_path: [] result = 
runner._get_migration_sql(migration, "up") assert result is None diff --git a/tests/unit/test_migrations/test_migration_commands.py b/tests/unit/test_migrations/test_migration_commands.py new file mode 100644 index 000000000..ac53185f7 --- /dev/null +++ b/tests/unit/test_migrations/test_migration_commands.py @@ -0,0 +1,313 @@ +"""Unit tests for migration commands functionality. + +Tests focused on MigrationCommands class behavior including: +- Async/sync command delegation +- Initialization behavior +- Configuration handling +- Error scenarios and edge cases +- Command routing and parameter passing +""" + +import tempfile +from pathlib import Path +from unittest.mock import AsyncMock, Mock, patch + +import pytest + +from sqlspec.adapters.aiosqlite.config import AiosqliteConfig +from sqlspec.adapters.sqlite.config import SqliteConfig +from sqlspec.migrations.commands import AsyncMigrationCommands, MigrationCommands, SyncMigrationCommands + + +@pytest.fixture +def sync_config() -> SqliteConfig: + """Create a sync database config for testing.""" + return SqliteConfig(pool_config={"database": ":memory:"}) + + +@pytest.fixture +def async_config() -> AiosqliteConfig: + """Create an async database config for testing.""" + return AiosqliteConfig(pool_config={"database": ":memory:"}) + + +def test_migration_commands_sync_config_initialization(sync_config: SqliteConfig) -> None: + """Test MigrationCommands initializes with sync implementation for sync config.""" + commands = MigrationCommands(sync_config) + + assert not commands._is_async + assert isinstance(commands._impl, SyncMigrationCommands) + + +def test_migration_commands_async_config_initialization(async_config: AiosqliteConfig) -> None: + """Test MigrationCommands initializes with async implementation for async config.""" + commands = MigrationCommands(async_config) + + assert commands._is_async + assert isinstance(commands._impl, AsyncMigrationCommands) + + +def test_migration_commands_sync_init_delegation(sync_config: SqliteConfig) -> None: + """Test that sync config init is delegated directly to sync implementation.""" + with patch.object(SyncMigrationCommands, "init") as mock_init: + commands = MigrationCommands(sync_config) + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = str(Path(temp_dir) / "migrations") + + commands.init(migration_dir, package=False) + + mock_init.assert_called_once_with(migration_dir, package=False) + + +def test_migration_commands_async_init_delegation(async_config: AiosqliteConfig) -> None: + """Test that async config init uses await_ wrapper.""" + with ( + patch.object(AsyncMigrationCommands, "init", new_callable=AsyncMock), + patch("sqlspec.migrations.commands.await_") as mock_await, + ): + # Use AsyncMock and set up await_ to return a simple callable + AsyncMock(return_value=None) + mock_await.return_value = Mock(return_value=None) + + commands = MigrationCommands(async_config) + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = str(Path(temp_dir) / "migrations") + + commands.init(migration_dir, package=True) + + # Verify await_ was called with raise_sync_error=False + mock_await.assert_called_once() + call_args = mock_await.call_args + assert call_args[1]["raise_sync_error"] is False + + +def test_migration_commands_sync_current_delegation(sync_config: SqliteConfig) -> None: + """Test that sync config current is delegated directly to sync implementation.""" + with patch.object(SyncMigrationCommands, "current") as mock_current: + commands = MigrationCommands(sync_config) 
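+        # Sync configs are routed straight to SyncMigrationCommands, so the call below should reach the patched method directly with no await_ wrapper involved.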
+ + commands.current(verbose=True) + + mock_current.assert_called_once_with(verbose=True) + + +def test_migration_commands_async_current_delegation(async_config: AiosqliteConfig) -> None: + """Test that async config current uses await_ wrapper.""" + with ( + patch.object(AsyncMigrationCommands, "current", new_callable=AsyncMock), + patch("sqlspec.migrations.commands.await_") as mock_await, + ): + # Set up await_ to return a callable that returns the expected value + mock_await.return_value = Mock(return_value="test_version") + + commands = MigrationCommands(async_config) + + result = commands.current(verbose=False) + + # Verify await_ was called with raise_sync_error=False + mock_await.assert_called_once() + call_args = mock_await.call_args + assert call_args[1]["raise_sync_error"] is False + assert result == "test_version" + + +def test_migration_commands_sync_upgrade_delegation(sync_config: SqliteConfig) -> None: + """Test that sync config upgrade is delegated directly to sync implementation.""" + with patch.object(SyncMigrationCommands, "upgrade") as mock_upgrade: + commands = MigrationCommands(sync_config) + + commands.upgrade(revision="001") + + mock_upgrade.assert_called_once_with(revision="001") + + +def test_migration_commands_async_upgrade_delegation(async_config: AiosqliteConfig) -> None: + """Test that async config upgrade uses await_ wrapper.""" + with ( + patch.object(AsyncMigrationCommands, "upgrade", new_callable=AsyncMock), + patch("sqlspec.migrations.commands.await_") as mock_await, + ): + # Set up await_ to return a callable that returns None + mock_await.return_value = Mock(return_value=None) + + commands = MigrationCommands(async_config) + + commands.upgrade(revision="002") + + # Verify await_ was called with raise_sync_error=False + mock_await.assert_called_once() + call_args = mock_await.call_args + assert call_args[1]["raise_sync_error"] is False + + +def test_migration_commands_sync_downgrade_delegation(sync_config: SqliteConfig) -> None: + """Test that sync config downgrade is delegated directly to sync implementation.""" + with patch.object(SyncMigrationCommands, "downgrade") as mock_downgrade: + commands = MigrationCommands(sync_config) + + commands.downgrade(revision="base") + + mock_downgrade.assert_called_once_with(revision="base") + + +def test_migration_commands_async_downgrade_delegation(async_config: AiosqliteConfig) -> None: + """Test that async config downgrade uses await_ wrapper.""" + with ( + patch.object(AsyncMigrationCommands, "downgrade", new_callable=AsyncMock), + patch("sqlspec.migrations.commands.await_") as mock_await, + ): + # Set up await_ to return a callable that returns None + mock_await.return_value = Mock(return_value=None) + + commands = MigrationCommands(async_config) + + commands.downgrade(revision="001") + + # Verify await_ was called with raise_sync_error=False + mock_await.assert_called_once() + call_args = mock_await.call_args + assert call_args[1]["raise_sync_error"] is False + + +def test_migration_commands_sync_stamp_delegation(sync_config: SqliteConfig) -> None: + """Test that sync config stamp is delegated directly to sync implementation.""" + with patch.object(SyncMigrationCommands, "stamp") as mock_stamp: + commands = MigrationCommands(sync_config) + + commands.stamp("001") + + mock_stamp.assert_called_once_with("001") + + +def test_migration_commands_async_stamp_delegation(async_config: AiosqliteConfig) -> None: + """Test that async config stamp uses await_ wrapper.""" + with ( + patch.object(AsyncMigrationCommands, 
"stamp", new_callable=AsyncMock), + patch("sqlspec.migrations.commands.await_") as mock_await, + ): + # Set up await_ to return a callable that returns None + mock_await.return_value = Mock(return_value=None) + + commands = MigrationCommands(async_config) + + commands.stamp("002") + + # Verify await_ was called with raise_sync_error=False + mock_await.assert_called_once() + call_args = mock_await.call_args + assert call_args[1]["raise_sync_error"] is False + + +def test_migration_commands_sync_revision_delegation(sync_config: SqliteConfig) -> None: + """Test that sync config revision is delegated directly to sync implementation.""" + with patch.object(SyncMigrationCommands, "revision") as mock_revision: + commands = MigrationCommands(sync_config) + + commands.revision("Test revision", "sql") + + mock_revision.assert_called_once_with("Test revision", "sql") + + +def test_migration_commands_async_revision_delegation(async_config: AiosqliteConfig) -> None: + """Test that async config revision uses await_ wrapper.""" + with ( + patch.object(AsyncMigrationCommands, "revision", new_callable=AsyncMock), + patch("sqlspec.migrations.commands.await_") as mock_await, + ): + # Set up await_ to return a callable that returns None + mock_await.return_value = Mock(return_value=None) + + commands = MigrationCommands(async_config) + + commands.revision("Test async revision", "python") + + # Verify await_ was called with raise_sync_error=False + mock_await.assert_called_once() + call_args = mock_await.call_args + assert call_args[1]["raise_sync_error"] is False + + +def test_sync_migration_commands_initialization(sync_config: SqliteConfig) -> None: + """Test SyncMigrationCommands proper initialization.""" + commands = SyncMigrationCommands(sync_config) + + assert commands.config == sync_config + assert hasattr(commands, "tracker") + assert hasattr(commands, "runner") + + +def test_async_migration_commands_initialization(async_config: AiosqliteConfig) -> None: + """Test AsyncMigrationCommands proper initialization.""" + commands = AsyncMigrationCommands(async_config) + + assert commands.config == async_config + assert hasattr(commands, "tracker") + assert hasattr(commands, "runner") + + +def test_sync_migration_commands_init_creates_directory(sync_config: SqliteConfig) -> None: + """Test that SyncMigrationCommands init creates migration directory structure.""" + commands = SyncMigrationCommands(sync_config) + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + commands.init(str(migration_dir), package=True) + + assert migration_dir.exists() + assert (migration_dir / "__init__.py").exists() + + +def test_sync_migration_commands_init_without_package(sync_config: SqliteConfig) -> None: + """Test that SyncMigrationCommands init creates directory without __init__.py when package=False.""" + commands = SyncMigrationCommands(sync_config) + + with tempfile.TemporaryDirectory() as temp_dir: + migration_dir = Path(temp_dir) / "migrations" + + commands.init(str(migration_dir), package=False) + + assert migration_dir.exists() + assert not (migration_dir / "__init__.py").exists() + + +def test_migration_commands_error_propagation(async_config: AiosqliteConfig) -> None: + """Test that errors from underlying implementations are properly propagated.""" + with ( + patch.object(AsyncMigrationCommands, "upgrade", side_effect=ValueError("Test error")), + patch("sqlspec.migrations.commands.await_") as mock_await, + ): + # Set up await_ to raise the same error + 
mock_await.return_value = Mock(side_effect=ValueError("Test error")) + + commands = MigrationCommands(async_config) + + with pytest.raises(ValueError, match="Test error"): + commands.upgrade() + + +def test_migration_commands_parameter_forwarding(sync_config: SqliteConfig) -> None: + """Test that all parameters are properly forwarded to underlying implementations.""" + with patch.object(SyncMigrationCommands, "upgrade") as mock_upgrade: + commands = MigrationCommands(sync_config) + + # Test with various parameter combinations + commands.upgrade() + mock_upgrade.assert_called_with(revision="head") + + commands.upgrade("specific_revision") + mock_upgrade.assert_called_with(revision="specific_revision") + + +def test_migration_commands_config_type_detection(sync_config: SqliteConfig, async_config: AiosqliteConfig) -> None: + """Test that MigrationCommands correctly detects async vs sync configs.""" + sync_commands = MigrationCommands(sync_config) + async_commands = MigrationCommands(async_config) + + assert not sync_commands._is_async + assert async_commands._is_async + + assert isinstance(sync_commands._impl, SyncMigrationCommands) + assert isinstance(async_commands._impl, AsyncMigrationCommands) diff --git a/tests/unit/test_migrations/test_migration_execution.py b/tests/unit/test_migrations/test_migration_execution.py index b50386ae6..b4d5921aa 100644 --- a/tests/unit/test_migrations/test_migration_execution.py +++ b/tests/unit/test_migrations/test_migration_execution.py @@ -20,7 +20,6 @@ import pytest -from sqlspec.core.statement import SQL from sqlspec.driver import ExecutionResult from sqlspec.migrations.base import BaseMigrationRunner, BaseMigrationTracker @@ -34,16 +33,13 @@ def temp_workspace() -> Generator[Path, None, None]: @pytest.fixture -def temp_workspace_with_migrations() -> Generator[Path, None, None]: +def temp_workspace_with_migrations(tmp_path: Path) -> Path: """Create a temporary workspace with migrations directory for tests.""" - with tempfile.TemporaryDirectory() as temp_dir: - workspace = Path(temp_dir) + # Create migrations directory in pytest's tmp_path + migrations_dir = tmp_path / "migrations" + migrations_dir.mkdir() - # Create migrations directory - migrations_dir = workspace / "migrations" - migrations_dir.mkdir() - - yield workspace + return tmp_path class MockMigrationTracker(BaseMigrationTracker): @@ -132,15 +128,16 @@ def test_tracking_table_sql_generation() -> None: create_sql = tracker._get_create_table_sql() - assert isinstance(create_sql, SQL) - assert "CREATE TABLE" in create_sql.sql.upper() - assert "test_migrations" in create_sql.sql - assert "version_num" in create_sql.sql - assert "description" in create_sql.sql - assert "applied_at" in create_sql.sql - assert "execution_time_ms" in create_sql.sql - assert "checksum" in create_sql.sql - assert "applied_by" in create_sql.sql + assert hasattr(create_sql, "to_statement") + stmt = create_sql.to_statement() + assert "CREATE TABLE" in stmt.sql.upper() + assert "test_migrations" in stmt.sql + assert "version_num" in stmt.sql + assert "description" in stmt.sql + assert "applied_at" in stmt.sql + assert "execution_time_ms" in stmt.sql + assert "checksum" in stmt.sql + assert "applied_by" in stmt.sql def test_current_version_sql_generation() -> None: @@ -149,12 +146,13 @@ def test_current_version_sql_generation() -> None: version_sql = tracker._get_current_version_sql() - assert isinstance(version_sql, SQL) - assert "SELECT" in version_sql.sql.upper() - assert "version_num" in version_sql.sql - assert 
"test_migrations" in version_sql.sql - assert "ORDER BY" in version_sql.sql.upper() - assert "LIMIT" in version_sql.sql.upper() + assert hasattr(version_sql, "to_statement") + stmt = version_sql.to_statement() + assert "SELECT" in stmt.sql.upper() + assert "version_num" in stmt.sql + assert "test_migrations" in stmt.sql + assert "ORDER BY" in stmt.sql.upper() + assert "LIMIT" in stmt.sql.upper() def test_applied_migrations_sql_generation() -> None: @@ -163,12 +161,13 @@ def test_applied_migrations_sql_generation() -> None: applied_sql = tracker._get_applied_migrations_sql() - assert isinstance(applied_sql, SQL) - assert "SELECT" in applied_sql.sql.upper() - assert "*" in applied_sql.sql - assert "test_migrations" in applied_sql.sql.lower() - assert "ORDER BY" in applied_sql.sql.upper() - assert "version_num" in applied_sql.sql.lower() + assert hasattr(applied_sql, "to_statement") + stmt = applied_sql.to_statement() + assert "SELECT" in stmt.sql.upper() + assert "*" in stmt.sql + assert "test_migrations" in stmt.sql.lower() + assert "ORDER BY" in stmt.sql.upper() + assert "version_num" in stmt.sql.lower() def test_record_migration_sql_generation() -> None: @@ -179,15 +178,16 @@ def test_record_migration_sql_generation() -> None: version="0001", description="test migration", execution_time_ms=250, checksum="abc123", applied_by="test_user" ) - assert isinstance(record_sql, SQL) - assert "INSERT INTO" in record_sql.sql.upper() - assert "test_migrations" in record_sql.sql - assert "VALUES" in record_sql.sql.upper() + assert hasattr(record_sql, "to_statement") + stmt = record_sql.to_statement() + assert "INSERT INTO" in stmt.sql.upper() + assert "test_migrations" in stmt.sql + assert "VALUES" in stmt.sql.upper() # Check parameters are set - params = record_sql.parameters - assert "0001" in str(params) or "0001" in record_sql.sql - assert "test migration" in str(params) or "test migration" in record_sql.sql + params = stmt.parameters + assert "0001" in str(params) or "0001" in stmt.sql + assert "test migration" in str(params) or "test migration" in stmt.sql def test_remove_migration_sql_generation() -> None: @@ -196,11 +196,12 @@ def test_remove_migration_sql_generation() -> None: remove_sql = tracker._get_remove_migration_sql("0001") - assert isinstance(remove_sql, SQL) - assert "DELETE" in remove_sql.sql.upper() - assert "test_migrations" in remove_sql.sql - assert "WHERE" in remove_sql.sql.upper() - assert "version_num" in remove_sql.sql + assert hasattr(remove_sql, "to_statement") + stmt = remove_sql.to_statement() + assert "DELETE" in stmt.sql.upper() + assert "test_migrations" in stmt.sql + assert "WHERE" in stmt.sql.upper() + assert "version_num" in stmt.sql def test_single_migration_upgrade_execution(temp_workspace_with_migrations: Path) -> None: @@ -230,8 +231,6 @@ def test_single_migration_upgrade_execution(temp_workspace_with_migrations: Path with patch("sqlspec.migrations.base.get_migration_loader") as mock_get_loader: mock_loader = Mock() mock_loader.validate_migration_file = Mock() - mock_loader.get_up_sql = Mock() - mock_loader.get_down_sql = Mock() mock_get_loader.return_value = mock_loader runner.loader.clear_cache = Mock() @@ -241,8 +240,11 @@ def test_single_migration_upgrade_execution(temp_workspace_with_migrations: Path migration = runner.load_migration(migration_file) # Execute upgrade - with patch("sqlspec.migrations.base.run_") as mock_run: - mock_run.return_value = lambda file_path: [ + from unittest.mock import AsyncMock + + with patch.object(migration["loader"], 
"get_up_sql", new_callable=AsyncMock) as mock_get_up_sql: + # get_up_sql is async and should return the SQL statements + mock_get_up_sql.return_value = [ "CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT NOT NULL, email TEXT UNIQUE NOT NULL);" ] @@ -259,6 +261,8 @@ def test_single_migration_upgrade_execution(temp_workspace_with_migrations: Path def test_single_migration_downgrade_execution(temp_workspace_with_migrations: Path) -> None: """Test execution of a single migration downgrade.""" + from unittest.mock import AsyncMock + migrations_dir = temp_workspace_with_migrations / "migrations" # Create migration file @@ -283,8 +287,6 @@ def test_single_migration_downgrade_execution(temp_workspace_with_migrations: Pa with patch("sqlspec.migrations.base.get_migration_loader") as mock_get_loader: mock_loader = Mock() mock_loader.validate_migration_file = Mock() - mock_loader.get_up_sql = Mock() - mock_loader.get_down_sql = Mock() mock_get_loader.return_value = mock_loader runner.loader.clear_cache = Mock() @@ -294,8 +296,9 @@ def test_single_migration_downgrade_execution(temp_workspace_with_migrations: Pa migration = runner.load_migration(migration_file) # Execute downgrade - with patch("sqlspec.migrations.base.run_") as mock_run: - mock_run.return_value = lambda file_path: ["DROP TABLE users;"] + with patch.object(migration["loader"], "get_down_sql", new_callable=AsyncMock) as mock_get_down_sql: + # get_down_sql is async and should return the SQL statements + mock_get_down_sql.return_value = ["DROP TABLE users;"] result = runner.execute_downgrade(mock_driver, migration) @@ -310,6 +313,8 @@ def test_single_migration_downgrade_execution(temp_workspace_with_migrations: Pa def test_multiple_migrations_execution_order(temp_workspace_with_migrations: Path) -> None: """Test execution order of multiple migrations.""" + from unittest.mock import AsyncMock + migrations_dir = temp_workspace_with_migrations / "migrations" # Create multiple migration files (in non-sequential creation order) @@ -352,14 +357,9 @@ def test_multiple_migrations_execution_order(temp_workspace_with_migrations: Pat assert migration_files[2][0] == "0003" # Indexes last # Execute all migrations in order - with ( - patch("sqlspec.migrations.base.get_migration_loader") as mock_get_loader, - patch("sqlspec.migrations.base.run_") as mock_run, - ): + with patch("sqlspec.migrations.base.get_migration_loader") as mock_get_loader: mock_loader = Mock() mock_loader.validate_migration_file = Mock() - mock_loader.get_up_sql = Mock() - mock_loader.get_down_sql = Mock() mock_get_loader.return_value = mock_loader runner.loader.clear_cache = Mock() @@ -374,12 +374,13 @@ def test_multiple_migrations_execution_order(temp_workspace_with_migrations: Pat ] for i, (version, file_path) in enumerate(migration_files): - mock_run.return_value = lambda fp: [sql_statements[i]] - migration = runner.load_migration(file_path) - result = runner.execute_upgrade(mock_driver, migration) - assert result is not None + # Mock the get_up_sql method on the loader + with patch.object(migration["loader"], "get_up_sql", new_callable=AsyncMock) as mock_get_up_sql: + mock_get_up_sql.return_value = [sql_statements[i]] + result = runner.execute_upgrade(mock_driver, migration) + assert result is not None # Verify execution order executed = runner.get_executed_migrations() @@ -391,6 +392,8 @@ def test_multiple_migrations_execution_order(temp_workspace_with_migrations: Pat def test_migration_with_no_downgrade(temp_workspace_with_migrations: Path) -> None: """Test migration 
execution when no downgrade is available.""" + from unittest.mock import AsyncMock + migrations_dir = temp_workspace_with_migrations / "migrations" # Create migration file with only upgrade @@ -409,8 +412,6 @@ def test_migration_with_no_downgrade(temp_workspace_with_migrations: Path) -> No with patch("sqlspec.migrations.base.get_migration_loader") as mock_get_loader: mock_loader = Mock() mock_loader.validate_migration_file = Mock() - mock_loader.get_up_sql = Mock() - mock_loader.get_down_sql = Mock() mock_get_loader.return_value = mock_loader runner.loader.clear_cache = Mock() @@ -424,8 +425,9 @@ def test_migration_with_no_downgrade(temp_workspace_with_migrations: Path) -> No assert migration["has_downgrade"] is False # Execute upgrade should work - with patch("sqlspec.migrations.base.run_") as mock_run: - mock_run.return_value = lambda file_path: [ + with patch.object(migration["loader"], "get_up_sql", new_callable=AsyncMock) as mock_get_up_sql: + # get_up_sql is async and should return the SQL statements + mock_get_up_sql.return_value = [ "CREATE TABLE irreversible_data AS SELECT DISTINCT column1, column2 FROM legacy_table;" ] @@ -433,7 +435,12 @@ def test_migration_with_no_downgrade(temp_workspace_with_migrations: Path) -> No assert result is not None # Execute downgrade should handle gracefully - with patch("sqlspec.migrations.base.run_") as mock_run, patch("sqlspec.migrations.base.logger"): + with ( + patch.object(migration["loader"], "get_down_sql", new_callable=AsyncMock) as mock_get_down_sql, + patch("sqlspec.migrations.base.logger"), + ): + # Return empty list for no downgrade + mock_get_down_sql.return_value = [] result = runner.execute_downgrade(mock_driver, migration) # Should not raise error, but may log warning @@ -557,8 +564,6 @@ def test_migration_execution_failure(temp_workspace_with_migrations: Path) -> No with patch("sqlspec.migrations.base.get_migration_loader") as mock_get_loader: mock_loader = Mock() mock_loader.validate_migration_file = Mock() - mock_loader.get_up_sql = Mock() - mock_loader.get_down_sql = Mock() mock_get_loader.return_value = mock_loader runner.loader.clear_cache = Mock() @@ -567,9 +572,13 @@ def test_migration_execution_failure(temp_workspace_with_migrations: Path) -> No migration = runner.load_migration(migration_file) - # Mock run_ to raise exception for invalid SQL - with patch("sqlspec.migrations.base.run_") as mock_run: - mock_run.side_effect = Exception("SQL syntax error") + # Mock get_up_sql to raise exception for invalid SQL + with patch.object(migration["loader"], "get_up_sql") as mock_get_up_sql: + + async def mock_get_up_error() -> None: + raise Exception("SQL syntax error") + + mock_get_up_sql.return_value = mock_get_up_error() with pytest.raises(ValueError) as exc_info: runner.execute_upgrade(mock_driver, migration) @@ -596,8 +605,6 @@ def test_missing_upgrade_migration(temp_workspace_with_migrations: Path) -> None with patch("sqlspec.migrations.base.get_migration_loader") as mock_get_loader: mock_loader = Mock() mock_loader.validate_migration_file = Mock() - mock_loader.get_up_sql = Mock() - mock_loader.get_down_sql = Mock() mock_get_loader.return_value = mock_loader runner.loader.clear_cache = Mock() diff --git a/tests/unit/test_migrations/test_migration_runner.py b/tests/unit/test_migrations/test_migration_runner.py index e3d160494..0f2482ffd 100644 --- a/tests/unit/test_migrations/test_migration_runner.py +++ b/tests/unit/test_migrations/test_migration_runner.py @@ -17,7 +17,6 @@ import pytest -from sqlspec.core.statement import SQL 
from sqlspec.migrations.base import BaseMigrationRunner @@ -253,7 +252,7 @@ def down(): with ( patch("sqlspec.migrations.base.get_migration_loader") as mock_get_loader, - patch("sqlspec.migrations.base.run_") as mock_run, + patch("sqlspec.migrations.base.await_") as mock_await, ): mock_loader = Mock() mock_loader.validate_migration_file = Mock() @@ -262,7 +261,7 @@ def down(): mock_get_loader.return_value = mock_loader # Mock successful down_sql execution - mock_run.return_value = Mock(return_value=True) + mock_await.return_value = Mock(return_value=True) metadata = runner.load_migration(migration_file) @@ -288,16 +287,16 @@ def test_get_migration_sql_upgrade_success() -> None: "loader": Mock(), } - with patch("sqlspec.migrations.base.run_") as mock_run: - # Mock successful SQL generation - run_ should return a callable that returns the statements - mock_run.return_value = Mock(return_value=["CREATE TABLE test (id INTEGER PRIMARY KEY);"]) + with patch("sqlspec.migrations.base.await_") as mock_await: + # Mock successful SQL generation - await_ should return a callable that returns the statements + mock_await.return_value = Mock(return_value=["CREATE TABLE test (id INTEGER PRIMARY KEY);"]) result = runner._get_migration_sql(migration, "up") - # Should return SQL object with expected SQL text + # Should return list of SQL statements assert result is not None - assert isinstance(result, SQL) - assert result.sql == "CREATE TABLE test (id INTEGER PRIMARY KEY);" + assert isinstance(result, list) + assert result == ["CREATE TABLE test (id INTEGER PRIMARY KEY);"] def test_get_migration_sql_downgrade_success() -> None: @@ -312,16 +311,16 @@ def test_get_migration_sql_downgrade_success() -> None: "loader": Mock(), } - with patch("sqlspec.migrations.base.run_") as mock_run: - # Mock successful SQL generation - run_ should return a callable that returns the statements - mock_run.return_value = Mock(return_value=["DROP TABLE test;"]) + with patch("sqlspec.migrations.base.await_") as mock_await: + # Mock successful SQL generation - await_ should return a callable that returns the statements + mock_await.return_value = Mock(return_value=["DROP TABLE test;"]) result = runner._get_migration_sql(migration, "down") - # Should return SQL object with expected SQL text + # Should return list of SQL statements assert result is not None - assert isinstance(result, SQL) - assert result.sql == "DROP TABLE test;" + assert isinstance(result, list) + assert result == ["DROP TABLE test;"] def test_get_migration_sql_no_downgrade_warning() -> None: @@ -374,9 +373,9 @@ def test_get_migration_sql_loader_exception_upgrade() -> None: "loader": Mock(), } - with patch("sqlspec.migrations.base.run_") as mock_run: - # Mock loader exception - run_ should return a callable that raises an exception - mock_run.return_value = Mock(side_effect=Exception("Loader failed to parse migration")) + with patch("sqlspec.migrations.base.await_") as mock_await: + # Mock loader exception - await_ should return a callable that raises an exception + mock_await.return_value = Mock(side_effect=Exception("Loader failed to parse migration")) with pytest.raises(ValueError) as exc_info: runner._get_migration_sql(migration, "up") @@ -396,9 +395,13 @@ def test_get_migration_sql_loader_exception_downgrade() -> None: "loader": Mock(), } - with patch("sqlspec.migrations.base.run_") as mock_run, patch("sqlspec.migrations.base.logger") as mock_logger: - # Mock loader exception for downgrade - run_ should return a callable that raises an exception - 
mock_run.return_value = Mock(side_effect=Exception("Downgrade loader failed")) + with patch("sqlspec.migrations.base.await_") as mock_await, patch("sqlspec.migrations.base.logger") as mock_logger: + # Mock loader exception for downgrade - await_ should return a callable that raises an exception + # Use a regular function instead of Mock to avoid async coroutine issues + def mock_loader_function() -> None: + raise Exception("Downgrade loader failed") + + mock_await.return_value = mock_loader_function result = runner._get_migration_sql(migration, "down") @@ -423,9 +426,9 @@ def test_get_migration_sql_empty_statements() -> None: "loader": Mock(), } - with patch("sqlspec.migrations.base.run_") as mock_run: - # Mock empty statements list - run_ should return a callable that returns an empty list - mock_run.return_value = Mock(return_value=[]) + with patch("sqlspec.migrations.base.await_") as mock_await: + # Mock empty statements list - await_ should return a callable that returns an empty list + mock_await.return_value = Mock(return_value=[]) result = runner._get_migration_sql(migration, "up") @@ -445,9 +448,9 @@ def test_get_migration_sql_none_statements() -> None: "loader": Mock(), } - with patch("sqlspec.migrations.base.run_") as mock_run: - # Mock None return - run_ should return a callable that returns None - mock_run.return_value = Mock(return_value=None) + with patch("sqlspec.migrations.base.await_") as mock_await: + # Mock None return - await_ should return a callable that returns None + mock_await.return_value = Mock(return_value=None) result = runner._get_migration_sql(migration, "up") diff --git a/uv.lock b/uv.lock index 113cfe1dd..2745cab28 100644 --- a/uv.lock +++ b/uv.lock @@ -919,97 +919,97 @@ wheels = [ [[package]] name = "coverage" -version = "7.10.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f4/2c/253cc41cd0f40b84c1c34c5363e0407d73d4a1cae005fed6db3b823175bd/coverage-7.10.3.tar.gz", hash = "sha256:812ba9250532e4a823b070b0420a36499859542335af3dca8f47fc6aa1a05619", size = 822936, upload-time = "2025-08-10T21:27:39.968Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2f/44/e14576c34b37764c821866909788ff7463228907ab82bae188dab2b421f1/coverage-7.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:53808194afdf948c462215e9403cca27a81cf150d2f9b386aee4dab614ae2ffe", size = 215964, upload-time = "2025-08-10T21:25:22.828Z" }, - { url = "https://files.pythonhosted.org/packages/e6/15/f4f92d9b83100903efe06c9396ee8d8bdba133399d37c186fc5b16d03a87/coverage-7.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f4d1b837d1abf72187a61645dbf799e0d7705aa9232924946e1f57eb09a3bf00", size = 216361, upload-time = "2025-08-10T21:25:25.603Z" }, - { url = "https://files.pythonhosted.org/packages/e9/3a/c92e8cd5e89acc41cfc026dfb7acedf89661ce2ea1ee0ee13aacb6b2c20c/coverage-7.10.3-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:2a90dd4505d3cc68b847ab10c5ee81822a968b5191664e8a0801778fa60459fa", size = 243115, upload-time = "2025-08-10T21:25:27.09Z" }, - { url = "https://files.pythonhosted.org/packages/23/53/c1d8c2778823b1d95ca81701bb8f42c87dc341a2f170acdf716567523490/coverage-7.10.3-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d52989685ff5bf909c430e6d7f6550937bc6d6f3e6ecb303c97a86100efd4596", size = 244927, upload-time = "2025-08-10T21:25:28.77Z" }, - { url = 
"https://files.pythonhosted.org/packages/79/41/1e115fd809031f432b4ff8e2ca19999fb6196ab95c35ae7ad5e07c001130/coverage-7.10.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bdb558a1d97345bde3a9f4d3e8d11c9e5611f748646e9bb61d7d612a796671b5", size = 246784, upload-time = "2025-08-10T21:25:30.195Z" }, - { url = "https://files.pythonhosted.org/packages/c7/b2/0eba9bdf8f1b327ae2713c74d4b7aa85451bb70622ab4e7b8c000936677c/coverage-7.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c9e6331a8f09cb1fc8bda032752af03c366870b48cce908875ba2620d20d0ad4", size = 244828, upload-time = "2025-08-10T21:25:31.785Z" }, - { url = "https://files.pythonhosted.org/packages/1f/cc/74c56b6bf71f2a53b9aa3df8bc27163994e0861c065b4fe3a8ac290bed35/coverage-7.10.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:992f48bf35b720e174e7fae916d943599f1a66501a2710d06c5f8104e0756ee1", size = 242844, upload-time = "2025-08-10T21:25:33.37Z" }, - { url = "https://files.pythonhosted.org/packages/b6/7b/ac183fbe19ac5596c223cb47af5737f4437e7566100b7e46cc29b66695a5/coverage-7.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c5595fc4ad6a39312c786ec3326d7322d0cf10e3ac6a6df70809910026d67cfb", size = 243721, upload-time = "2025-08-10T21:25:34.939Z" }, - { url = "https://files.pythonhosted.org/packages/57/96/cb90da3b5a885af48f531905234a1e7376acfc1334242183d23154a1c285/coverage-7.10.3-cp310-cp310-win32.whl", hash = "sha256:9e92fa1f2bd5a57df9d00cf9ce1eb4ef6fccca4ceabec1c984837de55329db34", size = 218481, upload-time = "2025-08-10T21:25:36.935Z" }, - { url = "https://files.pythonhosted.org/packages/15/67/1ba4c7d75745c4819c54a85766e0a88cc2bff79e1760c8a2debc34106dc2/coverage-7.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:b96524d6e4a3ce6a75c56bb15dbd08023b0ae2289c254e15b9fbdddf0c577416", size = 219382, upload-time = "2025-08-10T21:25:38.267Z" }, - { url = "https://files.pythonhosted.org/packages/87/04/810e506d7a19889c244d35199cbf3239a2f952b55580aa42ca4287409424/coverage-7.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f2ff2e2afdf0d51b9b8301e542d9c21a8d084fd23d4c8ea2b3a1b3c96f5f7397", size = 216075, upload-time = "2025-08-10T21:25:39.891Z" }, - { url = "https://files.pythonhosted.org/packages/2e/50/6b3fbab034717b4af3060bdaea6b13dfdc6b1fad44b5082e2a95cd378a9a/coverage-7.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:18ecc5d1b9a8c570f6c9b808fa9a2b16836b3dd5414a6d467ae942208b095f85", size = 216476, upload-time = "2025-08-10T21:25:41.137Z" }, - { url = "https://files.pythonhosted.org/packages/c7/96/4368c624c1ed92659812b63afc76c492be7867ac8e64b7190b88bb26d43c/coverage-7.10.3-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1af4461b25fe92889590d438905e1fc79a95680ec2a1ff69a591bb3fdb6c7157", size = 246865, upload-time = "2025-08-10T21:25:42.408Z" }, - { url = "https://files.pythonhosted.org/packages/34/12/5608f76070939395c17053bf16e81fd6c06cf362a537ea9d07e281013a27/coverage-7.10.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3966bc9a76b09a40dc6063c8b10375e827ea5dfcaffae402dd65953bef4cba54", size = 248800, upload-time = "2025-08-10T21:25:44.098Z" }, - { url = "https://files.pythonhosted.org/packages/ce/52/7cc90c448a0ad724283cbcdfd66b8d23a598861a6a22ac2b7b8696491798/coverage-7.10.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:205a95b87ef4eb303b7bc5118b47b6b6604a644bcbdb33c336a41cfc0a08c06a", size = 250904, upload-time = 
"2025-08-10T21:25:45.384Z" }, - { url = "https://files.pythonhosted.org/packages/e6/70/9967b847063c1c393b4f4d6daab1131558ebb6b51f01e7df7150aa99f11d/coverage-7.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b3801b79fb2ad61e3c7e2554bab754fc5f105626056980a2b9cf3aef4f13f84", size = 248597, upload-time = "2025-08-10T21:25:47.059Z" }, - { url = "https://files.pythonhosted.org/packages/2d/fe/263307ce6878b9ed4865af42e784b42bb82d066bcf10f68defa42931c2c7/coverage-7.10.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b0dc69c60224cda33d384572da945759756e3f06b9cdac27f302f53961e63160", size = 246647, upload-time = "2025-08-10T21:25:48.334Z" }, - { url = "https://files.pythonhosted.org/packages/8e/27/d27af83ad162eba62c4eb7844a1de6cf7d9f6b185df50b0a3514a6f80ddd/coverage-7.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a83d4f134bab2c7ff758e6bb1541dd72b54ba295ced6a63d93efc2e20cb9b124", size = 247290, upload-time = "2025-08-10T21:25:49.945Z" }, - { url = "https://files.pythonhosted.org/packages/28/83/904ff27e15467a5622dbe9ad2ed5831b4a616a62570ec5924d06477dff5a/coverage-7.10.3-cp311-cp311-win32.whl", hash = "sha256:54e409dd64e5302b2a8fdf44ec1c26f47abd1f45a2dcf67bd161873ee05a59b8", size = 218521, upload-time = "2025-08-10T21:25:51.208Z" }, - { url = "https://files.pythonhosted.org/packages/b8/29/bc717b8902faaccf0ca486185f0dcab4778561a529dde51cb157acaafa16/coverage-7.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:30c601610a9b23807c5e9e2e442054b795953ab85d525c3de1b1b27cebeb2117", size = 219412, upload-time = "2025-08-10T21:25:52.494Z" }, - { url = "https://files.pythonhosted.org/packages/7b/7a/5a1a7028c11bb589268c656c6b3f2bbf06e0aced31bbdf7a4e94e8442cc0/coverage-7.10.3-cp311-cp311-win_arm64.whl", hash = "sha256:dabe662312a97958e932dee056f2659051d822552c0b866823e8ba1c2fe64770", size = 218091, upload-time = "2025-08-10T21:25:54.102Z" }, - { url = "https://files.pythonhosted.org/packages/b8/62/13c0b66e966c43d7aa64dadc8cd2afa1f5a2bf9bb863bdabc21fb94e8b63/coverage-7.10.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:449c1e2d3a84d18bd204258a897a87bc57380072eb2aded6a5b5226046207b42", size = 216262, upload-time = "2025-08-10T21:25:55.367Z" }, - { url = "https://files.pythonhosted.org/packages/b5/f0/59fdf79be7ac2f0206fc739032f482cfd3f66b18f5248108ff192741beae/coverage-7.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1d4f9ce50b9261ad196dc2b2e9f1fbbee21651b54c3097a25ad783679fd18294", size = 216496, upload-time = "2025-08-10T21:25:56.759Z" }, - { url = "https://files.pythonhosted.org/packages/34/b1/bc83788ba31bde6a0c02eb96bbc14b2d1eb083ee073beda18753fa2c4c66/coverage-7.10.3-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4dd4564207b160d0d45c36a10bc0a3d12563028e8b48cd6459ea322302a156d7", size = 247989, upload-time = "2025-08-10T21:25:58.067Z" }, - { url = "https://files.pythonhosted.org/packages/0c/29/f8bdf88357956c844bd872e87cb16748a37234f7f48c721dc7e981145eb7/coverage-7.10.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5ca3c9530ee072b7cb6a6ea7b640bcdff0ad3b334ae9687e521e59f79b1d0437", size = 250738, upload-time = "2025-08-10T21:25:59.406Z" }, - { url = "https://files.pythonhosted.org/packages/ae/df/6396301d332b71e42bbe624670af9376f63f73a455cc24723656afa95796/coverage-7.10.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b6df359e59fa243c9925ae6507e27f29c46698359f45e568fd51b9315dbbe587", size = 251868, upload-time = 
"2025-08-10T21:26:00.65Z" }, - { url = "https://files.pythonhosted.org/packages/91/21/d760b2df6139b6ef62c9cc03afb9bcdf7d6e36ed4d078baacffa618b4c1c/coverage-7.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a181e4c2c896c2ff64c6312db3bda38e9ade2e1aa67f86a5628ae85873786cea", size = 249790, upload-time = "2025-08-10T21:26:02.009Z" }, - { url = "https://files.pythonhosted.org/packages/69/91/5dcaa134568202397fa4023d7066d4318dc852b53b428052cd914faa05e1/coverage-7.10.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a374d4e923814e8b72b205ef6b3d3a647bb50e66f3558582eda074c976923613", size = 247907, upload-time = "2025-08-10T21:26:03.757Z" }, - { url = "https://files.pythonhosted.org/packages/38/ed/70c0e871cdfef75f27faceada461206c1cc2510c151e1ef8d60a6fedda39/coverage-7.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:daeefff05993e5e8c6e7499a8508e7bd94502b6b9a9159c84fd1fe6bce3151cb", size = 249344, upload-time = "2025-08-10T21:26:05.11Z" }, - { url = "https://files.pythonhosted.org/packages/5f/55/c8a273ed503cedc07f8a00dcd843daf28e849f0972e4c6be4c027f418ad6/coverage-7.10.3-cp312-cp312-win32.whl", hash = "sha256:187ecdcac21f9636d570e419773df7bd2fda2e7fa040f812e7f95d0bddf5f79a", size = 218693, upload-time = "2025-08-10T21:26:06.534Z" }, - { url = "https://files.pythonhosted.org/packages/94/58/dd3cfb2473b85be0b6eb8c5b6d80b6fc3f8f23611e69ef745cef8cf8bad5/coverage-7.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:4a50ad2524ee7e4c2a95e60d2b0b83283bdfc745fe82359d567e4f15d3823eb5", size = 219501, upload-time = "2025-08-10T21:26:08.195Z" }, - { url = "https://files.pythonhosted.org/packages/56/af/7cbcbf23d46de6f24246e3f76b30df099d05636b30c53c158a196f7da3ad/coverage-7.10.3-cp312-cp312-win_arm64.whl", hash = "sha256:c112f04e075d3495fa3ed2200f71317da99608cbb2e9345bdb6de8819fc30571", size = 218135, upload-time = "2025-08-10T21:26:09.584Z" }, - { url = "https://files.pythonhosted.org/packages/0a/ff/239e4de9cc149c80e9cc359fab60592365b8c4cbfcad58b8a939d18c6898/coverage-7.10.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b99e87304ffe0eb97c5308447328a584258951853807afdc58b16143a530518a", size = 216298, upload-time = "2025-08-10T21:26:10.973Z" }, - { url = "https://files.pythonhosted.org/packages/56/da/28717da68f8ba68f14b9f558aaa8f3e39ada8b9a1ae4f4977c8f98b286d5/coverage-7.10.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4af09c7574d09afbc1ea7da9dcea23665c01f3bc1b1feb061dac135f98ffc53a", size = 216546, upload-time = "2025-08-10T21:26:12.616Z" }, - { url = "https://files.pythonhosted.org/packages/de/bb/e1ade16b9e3f2d6c323faeb6bee8e6c23f3a72760a5d9af102ef56a656cb/coverage-7.10.3-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:488e9b50dc5d2aa9521053cfa706209e5acf5289e81edc28291a24f4e4488f46", size = 247538, upload-time = "2025-08-10T21:26:14.455Z" }, - { url = "https://files.pythonhosted.org/packages/ea/2f/6ae1db51dc34db499bfe340e89f79a63bd115fc32513a7bacdf17d33cd86/coverage-7.10.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:913ceddb4289cbba3a310704a424e3fb7aac2bc0c3a23ea473193cb290cf17d4", size = 250141, upload-time = "2025-08-10T21:26:15.787Z" }, - { url = "https://files.pythonhosted.org/packages/4f/ed/33efd8819895b10c66348bf26f011dd621e804866c996ea6893d682218df/coverage-7.10.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b1f91cbc78c7112ab84ed2a8defbccd90f888fcae40a97ddd6466b0bec6ae8a", size = 251415, upload-time = 
"2025-08-10T21:26:17.535Z" }, - { url = "https://files.pythonhosted.org/packages/26/04/cb83826f313d07dc743359c9914d9bc460e0798da9a0e38b4f4fabc207ed/coverage-7.10.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0bac054d45af7cd938834b43a9878b36ea92781bcb009eab040a5b09e9927e3", size = 249575, upload-time = "2025-08-10T21:26:18.921Z" }, - { url = "https://files.pythonhosted.org/packages/2d/fd/ae963c7a8e9581c20fa4355ab8940ca272554d8102e872dbb932a644e410/coverage-7.10.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fe72cbdd12d9e0f4aca873fa6d755e103888a7f9085e4a62d282d9d5b9f7928c", size = 247466, upload-time = "2025-08-10T21:26:20.263Z" }, - { url = "https://files.pythonhosted.org/packages/99/e8/b68d1487c6af370b8d5ef223c6d7e250d952c3acfbfcdbf1a773aa0da9d2/coverage-7.10.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c1e2e927ab3eadd7c244023927d646e4c15c65bb2ac7ae3c3e9537c013700d21", size = 249084, upload-time = "2025-08-10T21:26:21.638Z" }, - { url = "https://files.pythonhosted.org/packages/66/4d/a0bcb561645c2c1e21758d8200443669d6560d2a2fb03955291110212ec4/coverage-7.10.3-cp313-cp313-win32.whl", hash = "sha256:24d0c13de473b04920ddd6e5da3c08831b1170b8f3b17461d7429b61cad59ae0", size = 218735, upload-time = "2025-08-10T21:26:23.009Z" }, - { url = "https://files.pythonhosted.org/packages/6a/c3/78b4adddbc0feb3b223f62761e5f9b4c5a758037aaf76e0a5845e9e35e48/coverage-7.10.3-cp313-cp313-win_amd64.whl", hash = "sha256:3564aae76bce4b96e2345cf53b4c87e938c4985424a9be6a66ee902626edec4c", size = 219531, upload-time = "2025-08-10T21:26:24.474Z" }, - { url = "https://files.pythonhosted.org/packages/70/1b/1229c0b2a527fa5390db58d164aa896d513a1fbb85a1b6b6676846f00552/coverage-7.10.3-cp313-cp313-win_arm64.whl", hash = "sha256:f35580f19f297455f44afcd773c9c7a058e52eb6eb170aa31222e635f2e38b87", size = 218162, upload-time = "2025-08-10T21:26:25.847Z" }, - { url = "https://files.pythonhosted.org/packages/fc/26/1c1f450e15a3bf3eaecf053ff64538a2612a23f05b21d79ce03be9ff5903/coverage-7.10.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07009152f497a0464ffdf2634586787aea0e69ddd023eafb23fc38267db94b84", size = 217003, upload-time = "2025-08-10T21:26:27.231Z" }, - { url = "https://files.pythonhosted.org/packages/29/96/4b40036181d8c2948454b458750960956a3c4785f26a3c29418bbbee1666/coverage-7.10.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8dd2ba5f0c7e7e8cc418be2f0c14c4d9e3f08b8fb8e4c0f83c2fe87d03eb655e", size = 217238, upload-time = "2025-08-10T21:26:28.83Z" }, - { url = "https://files.pythonhosted.org/packages/62/23/8dfc52e95da20957293fb94d97397a100e63095ec1e0ef5c09dd8c6f591a/coverage-7.10.3-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1ae22b97003c74186e034a93e4f946c75fad8c0ce8d92fbbc168b5e15ee2841f", size = 258561, upload-time = "2025-08-10T21:26:30.475Z" }, - { url = "https://files.pythonhosted.org/packages/59/95/00e7fcbeda3f632232f4c07dde226afe3511a7781a000aa67798feadc535/coverage-7.10.3-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:eb329f1046888a36b1dc35504d3029e1dd5afe2196d94315d18c45ee380f67d5", size = 260735, upload-time = "2025-08-10T21:26:32.333Z" }, - { url = "https://files.pythonhosted.org/packages/9e/4c/f4666cbc4571804ba2a65b078ff0de600b0b577dc245389e0bc9b69ae7ca/coverage-7.10.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce01048199a91f07f96ca3074b0c14021f4fe7ffd29a3e6a188ac60a5c3a4af8", size = 262960, upload-time = 
"2025-08-10T21:26:33.701Z" }, - { url = "https://files.pythonhosted.org/packages/c1/a5/8a9e8a7b12a290ed98b60f73d1d3e5e9ced75a4c94a0d1a671ce3ddfff2a/coverage-7.10.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:08b989a06eb9dfacf96d42b7fb4c9a22bafa370d245dc22fa839f2168c6f9fa1", size = 260515, upload-time = "2025-08-10T21:26:35.16Z" }, - { url = "https://files.pythonhosted.org/packages/86/11/bb59f7f33b2cac0c5b17db0d9d0abba9c90d9eda51a6e727b43bd5fce4ae/coverage-7.10.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:669fe0d4e69c575c52148511029b722ba8d26e8a3129840c2ce0522e1452b256", size = 258278, upload-time = "2025-08-10T21:26:36.539Z" }, - { url = "https://files.pythonhosted.org/packages/cc/22/3646f8903743c07b3e53fded0700fed06c580a980482f04bf9536657ac17/coverage-7.10.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3262d19092771c83f3413831d9904b1ccc5f98da5de4ffa4ad67f5b20c7aaf7b", size = 259408, upload-time = "2025-08-10T21:26:37.954Z" }, - { url = "https://files.pythonhosted.org/packages/d2/5c/6375e9d905da22ddea41cd85c30994b8b6f6c02e44e4c5744b76d16b026f/coverage-7.10.3-cp313-cp313t-win32.whl", hash = "sha256:cc0ee4b2ccd42cab7ee6be46d8a67d230cb33a0a7cd47a58b587a7063b6c6b0e", size = 219396, upload-time = "2025-08-10T21:26:39.426Z" }, - { url = "https://files.pythonhosted.org/packages/33/3b/7da37fd14412b8c8b6e73c3e7458fef6b1b05a37f990a9776f88e7740c89/coverage-7.10.3-cp313-cp313t-win_amd64.whl", hash = "sha256:03db599f213341e2960430984e04cf35fb179724e052a3ee627a068653cf4a7c", size = 220458, upload-time = "2025-08-10T21:26:40.905Z" }, - { url = "https://files.pythonhosted.org/packages/28/cc/59a9a70f17edab513c844ee7a5c63cf1057041a84cc725b46a51c6f8301b/coverage-7.10.3-cp313-cp313t-win_arm64.whl", hash = "sha256:46eae7893ba65f53c71284585a262f083ef71594f05ec5c85baf79c402369098", size = 218722, upload-time = "2025-08-10T21:26:42.362Z" }, - { url = "https://files.pythonhosted.org/packages/2d/84/bb773b51a06edbf1231b47dc810a23851f2796e913b335a0fa364773b842/coverage-7.10.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:bce8b8180912914032785850d8f3aacb25ec1810f5f54afc4a8b114e7a9b55de", size = 216280, upload-time = "2025-08-10T21:26:44.132Z" }, - { url = "https://files.pythonhosted.org/packages/92/a8/4d8ca9c111d09865f18d56facff64d5fa076a5593c290bd1cfc5dceb8dba/coverage-7.10.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:07790b4b37d56608536f7c1079bd1aa511567ac2966d33d5cec9cf520c50a7c8", size = 216557, upload-time = "2025-08-10T21:26:45.598Z" }, - { url = "https://files.pythonhosted.org/packages/fe/b2/eb668bfc5060194bc5e1ccd6f664e8e045881cfee66c42a2aa6e6c5b26e8/coverage-7.10.3-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e79367ef2cd9166acedcbf136a458dfe9a4a2dd4d1ee95738fb2ee581c56f667", size = 247598, upload-time = "2025-08-10T21:26:47.081Z" }, - { url = "https://files.pythonhosted.org/packages/fd/b0/9faa4ac62c8822219dd83e5d0e73876398af17d7305968aed8d1606d1830/coverage-7.10.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:419d2a0f769f26cb1d05e9ccbc5eab4cb5d70231604d47150867c07822acbdf4", size = 250131, upload-time = "2025-08-10T21:26:48.65Z" }, - { url = "https://files.pythonhosted.org/packages/4e/90/203537e310844d4bf1bdcfab89c1e05c25025c06d8489b9e6f937ad1a9e2/coverage-7.10.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee221cf244757cdc2ac882e3062ab414b8464ad9c884c21e878517ea64b3fa26", size = 251485, upload-time = 
"2025-08-10T21:26:50.368Z" }, - { url = "https://files.pythonhosted.org/packages/b9/b2/9d894b26bc53c70a1fe503d62240ce6564256d6d35600bdb86b80e516e7d/coverage-7.10.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c2079d8cdd6f7373d628e14b3357f24d1db02c9dc22e6a007418ca7a2be0435a", size = 249488, upload-time = "2025-08-10T21:26:52.045Z" }, - { url = "https://files.pythonhosted.org/packages/b4/28/af167dbac5281ba6c55c933a0ca6675d68347d5aee39cacc14d44150b922/coverage-7.10.3-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:bd8df1f83c0703fa3ca781b02d36f9ec67ad9cb725b18d486405924f5e4270bd", size = 247419, upload-time = "2025-08-10T21:26:53.533Z" }, - { url = "https://files.pythonhosted.org/packages/f4/1c/9a4ddc9f0dcb150d4cd619e1c4bb39bcf694c6129220bdd1e5895d694dda/coverage-7.10.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6b4e25e0fa335c8aa26e42a52053f3786a61cc7622b4d54ae2dad994aa754fec", size = 248917, upload-time = "2025-08-10T21:26:55.11Z" }, - { url = "https://files.pythonhosted.org/packages/92/27/c6a60c7cbe10dbcdcd7fc9ee89d531dc04ea4c073800279bb269954c5a9f/coverage-7.10.3-cp314-cp314-win32.whl", hash = "sha256:d7c3d02c2866deb217dce664c71787f4b25420ea3eaf87056f44fb364a3528f5", size = 218999, upload-time = "2025-08-10T21:26:56.637Z" }, - { url = "https://files.pythonhosted.org/packages/36/09/a94c1369964ab31273576615d55e7d14619a1c47a662ed3e2a2fe4dee7d4/coverage-7.10.3-cp314-cp314-win_amd64.whl", hash = "sha256:9c8916d44d9e0fe6cdb2227dc6b0edd8bc6c8ef13438bbbf69af7482d9bb9833", size = 219801, upload-time = "2025-08-10T21:26:58.207Z" }, - { url = "https://files.pythonhosted.org/packages/23/59/f5cd2a80f401c01cf0f3add64a7b791b7d53fd6090a4e3e9ea52691cf3c4/coverage-7.10.3-cp314-cp314-win_arm64.whl", hash = "sha256:1007d6a2b3cf197c57105cc1ba390d9ff7f0bee215ced4dea530181e49c65ab4", size = 218381, upload-time = "2025-08-10T21:26:59.707Z" }, - { url = "https://files.pythonhosted.org/packages/73/3d/89d65baf1ea39e148ee989de6da601469ba93c1d905b17dfb0b83bd39c96/coverage-7.10.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:ebc8791d346410d096818788877d675ca55c91db87d60e8f477bd41c6970ffc6", size = 217019, upload-time = "2025-08-10T21:27:01.242Z" }, - { url = "https://files.pythonhosted.org/packages/7d/7d/d9850230cd9c999ce3a1e600f85c2fff61a81c301334d7a1faa1a5ba19c8/coverage-7.10.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1f4e4d8e75f6fd3c6940ebeed29e3d9d632e1f18f6fb65d33086d99d4d073241", size = 217237, upload-time = "2025-08-10T21:27:03.442Z" }, - { url = "https://files.pythonhosted.org/packages/36/51/b87002d417202ab27f4a1cd6bd34ee3b78f51b3ddbef51639099661da991/coverage-7.10.3-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:24581ed69f132b6225a31b0228ae4885731cddc966f8a33fe5987288bdbbbd5e", size = 258735, upload-time = "2025-08-10T21:27:05.124Z" }, - { url = "https://files.pythonhosted.org/packages/1c/02/1f8612bfcb46fc7ca64a353fff1cd4ed932bb6e0b4e0bb88b699c16794b8/coverage-7.10.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ec151569ddfccbf71bac8c422dce15e176167385a00cd86e887f9a80035ce8a5", size = 260901, upload-time = "2025-08-10T21:27:06.68Z" }, - { url = "https://files.pythonhosted.org/packages/aa/3a/fe39e624ddcb2373908bd922756384bb70ac1c5009b0d1674eb326a3e428/coverage-7.10.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2ae8e7c56290b908ee817200c0b65929b8050bc28530b131fe7c6dfee3e7d86b", size = 263157, upload-time = 
"2025-08-10T21:27:08.398Z" }, - { url = "https://files.pythonhosted.org/packages/5e/89/496b6d5a10fa0d0691a633bb2b2bcf4f38f0bdfcbde21ad9e32d1af328ed/coverage-7.10.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5fb742309766d7e48e9eb4dc34bc95a424707bc6140c0e7d9726e794f11b92a0", size = 260597, upload-time = "2025-08-10T21:27:10.237Z" }, - { url = "https://files.pythonhosted.org/packages/b6/a6/8b5bf6a9e8c6aaeb47d5fe9687014148efc05c3588110246d5fdeef9b492/coverage-7.10.3-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:c65e2a5b32fbe1e499f1036efa6eb9cb4ea2bf6f7168d0e7a5852f3024f471b1", size = 258353, upload-time = "2025-08-10T21:27:11.773Z" }, - { url = "https://files.pythonhosted.org/packages/c3/6d/ad131be74f8afd28150a07565dfbdc86592fd61d97e2dc83383d9af219f0/coverage-7.10.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d48d2cb07d50f12f4f18d2bb75d9d19e3506c26d96fffabf56d22936e5ed8f7c", size = 259504, upload-time = "2025-08-10T21:27:13.254Z" }, - { url = "https://files.pythonhosted.org/packages/ec/30/fc9b5097092758cba3375a8cc4ff61774f8cd733bcfb6c9d21a60077a8d8/coverage-7.10.3-cp314-cp314t-win32.whl", hash = "sha256:dec0d9bc15ee305e09fe2cd1911d3f0371262d3cfdae05d79515d8cb712b4869", size = 219782, upload-time = "2025-08-10T21:27:14.736Z" }, - { url = "https://files.pythonhosted.org/packages/72/9b/27fbf79451b1fac15c4bda6ec6e9deae27cf7c0648c1305aa21a3454f5c4/coverage-7.10.3-cp314-cp314t-win_amd64.whl", hash = "sha256:424ea93a323aa0f7f01174308ea78bde885c3089ec1bef7143a6d93c3e24ef64", size = 220898, upload-time = "2025-08-10T21:27:16.297Z" }, - { url = "https://files.pythonhosted.org/packages/d1/cf/a32bbf92869cbf0b7c8b84325327bfc718ad4b6d2c63374fef3d58e39306/coverage-7.10.3-cp314-cp314t-win_arm64.whl", hash = "sha256:f5983c132a62d93d71c9ef896a0b9bf6e6828d8d2ea32611f58684fba60bba35", size = 218922, upload-time = "2025-08-10T21:27:18.22Z" }, - { url = "https://files.pythonhosted.org/packages/f1/66/c06f4a93c65b6fc6578ef4f1fe51f83d61fc6f2a74ec0ce434ed288d834a/coverage-7.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:da749daa7e141985487e1ff90a68315b0845930ed53dc397f4ae8f8bab25b551", size = 215951, upload-time = "2025-08-10T21:27:19.815Z" }, - { url = "https://files.pythonhosted.org/packages/c2/ea/cc18c70a6f72f8e4def212eaebd8388c64f29608da10b3c38c8ec76f5e49/coverage-7.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3126fb6a47d287f461d9b1aa5d1a8c97034d1dffb4f452f2cf211289dae74ef", size = 216335, upload-time = "2025-08-10T21:27:21.737Z" }, - { url = "https://files.pythonhosted.org/packages/f2/fb/9c6d1d67c6d54b149f06b9f374bc9ca03e4d7d7784c8cfd12ceda20e3787/coverage-7.10.3-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3da794db13cc27ca40e1ec8127945b97fab78ba548040047d54e7bfa6d442dca", size = 242772, upload-time = "2025-08-10T21:27:23.884Z" }, - { url = "https://files.pythonhosted.org/packages/5a/e5/4223bdb28b992a19a13ab1410c761e2bfe92ca1e7bba8e85ee2024eeda85/coverage-7.10.3-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4e27bebbd184ef8d1c1e092b74a2b7109dcbe2618dce6e96b1776d53b14b3fe8", size = 244596, upload-time = "2025-08-10T21:27:25.842Z" }, - { url = "https://files.pythonhosted.org/packages/d2/13/d646ba28613669d487c654a760571c10128247d12d9f50e93f69542679a2/coverage-7.10.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8fd4ee2580b9fefbd301b4f8f85b62ac90d1e848bea54f89a5748cf132782118", size = 246370, upload-time = "2025-08-10T21:27:27.503Z" 
}, - { url = "https://files.pythonhosted.org/packages/02/7c/aff99c67d8c383142b0877ee435caf493765356336211c4899257325d6c7/coverage-7.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6999920bdd73259ce11cabfc1307484f071ecc6abdb2ca58d98facbcefc70f16", size = 244254, upload-time = "2025-08-10T21:27:29.357Z" }, - { url = "https://files.pythonhosted.org/packages/b0/13/a51ea145ed51ddfa8717bb29926d9111aca343fab38f04692a843d50be6b/coverage-7.10.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c3623f929db885fab100cb88220a5b193321ed37e03af719efdbaf5d10b6e227", size = 242325, upload-time = "2025-08-10T21:27:30.931Z" }, - { url = "https://files.pythonhosted.org/packages/d8/4b/6119be0089c89ad49d2e5a508d55a1485c878642b706a7f95b26e299137d/coverage-7.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:25b902c5e15dea056485d782e420bb84621cc08ee75d5131ecb3dbef8bd1365f", size = 243281, upload-time = "2025-08-10T21:27:32.815Z" }, - { url = "https://files.pythonhosted.org/packages/34/c8/1b2e7e53eee4bc1304e56e10361b08197a77a26ceb07201dcc9e759ef132/coverage-7.10.3-cp39-cp39-win32.whl", hash = "sha256:f930a4d92b004b643183451fe9c8fe398ccf866ed37d172ebaccfd443a097f61", size = 218489, upload-time = "2025-08-10T21:27:34.905Z" }, - { url = "https://files.pythonhosted.org/packages/dd/1e/9c0c230a199809c39e2dff0f1f889dfb04dcd07d83c1c26a8ef671660e08/coverage-7.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:08e638a93c8acba13c7842953f92a33d52d73e410329acd472280d2a21a6c0e1", size = 219396, upload-time = "2025-08-10T21:27:36.61Z" }, - { url = "https://files.pythonhosted.org/packages/84/19/e67f4ae24e232c7f713337f3f4f7c9c58afd0c02866fb07c7b9255a19ed7/coverage-7.10.3-py3-none-any.whl", hash = "sha256:416a8d74dc0adfd33944ba2f405897bab87b7e9e84a391e09d241956bd953ce1", size = 207921, upload-time = "2025-08-10T21:27:38.254Z" }, +version = "7.10.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/4e/08b493f1f1d8a5182df0044acc970799b58a8d289608e0d891a03e9d269a/coverage-7.10.4.tar.gz", hash = "sha256:25f5130af6c8e7297fd14634955ba9e1697f47143f289e2a23284177c0061d27", size = 823798, upload-time = "2025-08-17T00:26:43.314Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/f4/350759710db50362685f922259c140592dba15eb4e2325656a98413864d9/coverage-7.10.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d92d6edb0ccafd20c6fbf9891ca720b39c2a6a4b4a6f9cf323ca2c986f33e475", size = 216403, upload-time = "2025-08-17T00:24:19.083Z" }, + { url = "https://files.pythonhosted.org/packages/29/7e/e467c2bb4d5ecfd166bfd22c405cce4c50de2763ba1d78e2729c59539a42/coverage-7.10.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7202da14dc0236884fcc45665ffb2d79d4991a53fbdf152ab22f69f70923cc22", size = 216802, upload-time = "2025-08-17T00:24:21.824Z" }, + { url = "https://files.pythonhosted.org/packages/62/ab/2accdd1ccfe63b890e5eb39118f63c155202df287798364868a2884a50af/coverage-7.10.4-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ada418633ae24ec8d0fcad5efe6fc7aa3c62497c6ed86589e57844ad04365674", size = 243558, upload-time = "2025-08-17T00:24:23.569Z" }, + { url = "https://files.pythonhosted.org/packages/43/04/c14c33d0cfc0f4db6b3504d01a47f4c798563d932a836fd5f2dbc0521d3d/coverage-7.10.4-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b828e33eca6c3322adda3b5884456f98c435182a44917ded05005adfa1415500", size = 245370, upload-time = "2025-08-17T00:24:24.858Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/71/147053061f1f51c1d3b3d040c3cb26876964a3a0dca0765d2441411ca568/coverage-7.10.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:802793ba397afcfdbe9f91f89d65ae88b958d95edc8caf948e1f47d8b6b2b606", size = 247228, upload-time = "2025-08-17T00:24:26.167Z" }, + { url = "https://files.pythonhosted.org/packages/cc/92/7ef882205d4d4eb502e6154ee7122c1a1b1ce3f29d0166921e0fb550a5d3/coverage-7.10.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d0b23512338c54101d3bf7a1ab107d9d75abda1d5f69bc0887fd079253e4c27e", size = 245270, upload-time = "2025-08-17T00:24:27.424Z" }, + { url = "https://files.pythonhosted.org/packages/ab/3d/297a20603abcc6c7d89d801286eb477b0b861f3c5a4222730f1c9837be3e/coverage-7.10.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f36b7dcf72d06a8c5e2dd3aca02be2b1b5db5f86404627dff834396efce958f2", size = 243287, upload-time = "2025-08-17T00:24:28.697Z" }, + { url = "https://files.pythonhosted.org/packages/65/f9/b04111438f41f1ddd5dc88706d5f8064ae5bb962203c49fe417fa23a362d/coverage-7.10.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fce316c367a1dc2c411821365592eeb335ff1781956d87a0410eae248188ba51", size = 244164, upload-time = "2025-08-17T00:24:30.393Z" }, + { url = "https://files.pythonhosted.org/packages/1e/e5/c7d9eb7a9ea66cf92d069077719fb2b07782dcd7050b01a9b88766b52154/coverage-7.10.4-cp310-cp310-win32.whl", hash = "sha256:8c5dab29fc8070b3766b5fc85f8d89b19634584429a2da6d42da5edfadaf32ae", size = 218917, upload-time = "2025-08-17T00:24:31.67Z" }, + { url = "https://files.pythonhosted.org/packages/66/30/4d9d3b81f5a836b31a7428b8a25e6d490d4dca5ff2952492af130153c35c/coverage-7.10.4-cp310-cp310-win_amd64.whl", hash = "sha256:4b0d114616f0fccb529a1817457d5fb52a10e106f86c5fb3b0bd0d45d0d69b93", size = 219822, upload-time = "2025-08-17T00:24:32.89Z" }, + { url = "https://files.pythonhosted.org/packages/ec/ba/2c9817e62018e7d480d14f684c160b3038df9ff69c5af7d80e97d143e4d1/coverage-7.10.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:05d5f98ec893d4a2abc8bc5f046f2f4367404e7e5d5d18b83de8fde1093ebc4f", size = 216514, upload-time = "2025-08-17T00:24:34.188Z" }, + { url = "https://files.pythonhosted.org/packages/e3/5a/093412a959a6b6261446221ba9fb23bb63f661a5de70b5d130763c87f916/coverage-7.10.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9267efd28f8994b750d171e58e481e3bbd69e44baed540e4c789f8e368b24b88", size = 216914, upload-time = "2025-08-17T00:24:35.881Z" }, + { url = "https://files.pythonhosted.org/packages/2c/1f/2fdf4a71cfe93b07eae845ebf763267539a7d8b7e16b062f959d56d7e433/coverage-7.10.4-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4456a039fdc1a89ea60823d0330f1ac6f97b0dbe9e2b6fb4873e889584b085fb", size = 247308, upload-time = "2025-08-17T00:24:37.61Z" }, + { url = "https://files.pythonhosted.org/packages/ba/16/33f6cded458e84f008b9f6bc379609a6a1eda7bffe349153b9960803fc11/coverage-7.10.4-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c2bfbd2a9f7e68a21c5bd191be94bfdb2691ac40d325bac9ef3ae45ff5c753d9", size = 249241, upload-time = "2025-08-17T00:24:38.919Z" }, + { url = "https://files.pythonhosted.org/packages/84/98/9c18e47c889be58339ff2157c63b91a219272503ee32b49d926eea2337f2/coverage-7.10.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ab7765f10ae1df7e7fe37de9e64b5a269b812ee22e2da3f84f97b1c7732a0d8", size = 251346, upload-time = 
"2025-08-17T00:24:40.507Z" }, + { url = "https://files.pythonhosted.org/packages/6d/07/00a6c0d53e9a22d36d8e95ddd049b860eef8f4b9fd299f7ce34d8e323356/coverage-7.10.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a09b13695166236e171ec1627ff8434b9a9bae47528d0ba9d944c912d33b3d2", size = 249037, upload-time = "2025-08-17T00:24:41.904Z" }, + { url = "https://files.pythonhosted.org/packages/3e/0e/1e1b944d6a6483d07bab5ef6ce063fcf3d0cc555a16a8c05ebaab11f5607/coverage-7.10.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5c9e75dfdc0167d5675e9804f04a56b2cf47fb83a524654297000b578b8adcb7", size = 247090, upload-time = "2025-08-17T00:24:43.193Z" }, + { url = "https://files.pythonhosted.org/packages/62/43/2ce5ab8a728b8e25ced077111581290ffaef9efaf860a28e25435ab925cf/coverage-7.10.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c751261bfe6481caba15ec005a194cb60aad06f29235a74c24f18546d8377df0", size = 247732, upload-time = "2025-08-17T00:24:44.906Z" }, + { url = "https://files.pythonhosted.org/packages/a4/f3/706c4a24f42c1c5f3a2ca56637ab1270f84d9e75355160dc34d5e39bb5b7/coverage-7.10.4-cp311-cp311-win32.whl", hash = "sha256:051c7c9e765f003c2ff6e8c81ccea28a70fb5b0142671e4e3ede7cebd45c80af", size = 218961, upload-time = "2025-08-17T00:24:46.241Z" }, + { url = "https://files.pythonhosted.org/packages/e8/aa/6b9ea06e0290bf1cf2a2765bba89d561c5c563b4e9db8298bf83699c8b67/coverage-7.10.4-cp311-cp311-win_amd64.whl", hash = "sha256:1a647b152f10be08fb771ae4a1421dbff66141e3d8ab27d543b5eb9ea5af8e52", size = 219851, upload-time = "2025-08-17T00:24:48.795Z" }, + { url = "https://files.pythonhosted.org/packages/8b/be/f0dc9ad50ee183369e643cd7ed8f2ef5c491bc20b4c3387cbed97dd6e0d1/coverage-7.10.4-cp311-cp311-win_arm64.whl", hash = "sha256:b09b9e4e1de0d406ca9f19a371c2beefe3193b542f64a6dd40cfcf435b7d6aa0", size = 218530, upload-time = "2025-08-17T00:24:50.164Z" }, + { url = "https://files.pythonhosted.org/packages/9e/4a/781c9e4dd57cabda2a28e2ce5b00b6be416015265851060945a5ed4bd85e/coverage-7.10.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a1f0264abcabd4853d4cb9b3d164adbf1565da7dab1da1669e93f3ea60162d79", size = 216706, upload-time = "2025-08-17T00:24:51.528Z" }, + { url = "https://files.pythonhosted.org/packages/6a/8c/51255202ca03d2e7b664770289f80db6f47b05138e06cce112b3957d5dfd/coverage-7.10.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:536cbe6b118a4df231b11af3e0f974a72a095182ff8ec5f4868c931e8043ef3e", size = 216939, upload-time = "2025-08-17T00:24:53.171Z" }, + { url = "https://files.pythonhosted.org/packages/06/7f/df11131483698660f94d3c847dc76461369782d7a7644fcd72ac90da8fd0/coverage-7.10.4-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9a4c0d84134797b7bf3f080599d0cd501471f6c98b715405166860d79cfaa97e", size = 248429, upload-time = "2025-08-17T00:24:54.934Z" }, + { url = "https://files.pythonhosted.org/packages/eb/fa/13ac5eda7300e160bf98f082e75f5c5b4189bf3a883dd1ee42dbedfdc617/coverage-7.10.4-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7c155fc0f9cee8c9803ea0ad153ab6a3b956baa5d4cd993405dc0b45b2a0b9e0", size = 251178, upload-time = "2025-08-17T00:24:56.353Z" }, + { url = "https://files.pythonhosted.org/packages/9a/bc/f63b56a58ad0bec68a840e7be6b7ed9d6f6288d790760647bb88f5fea41e/coverage-7.10.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a5f2ab6e451d4b07855d8bcf063adf11e199bff421a4ba57f5bb95b7444ca62", size = 252313, upload-time = 
"2025-08-17T00:24:57.692Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b6/79338f1ea27b01266f845afb4485976211264ab92407d1c307babe3592a7/coverage-7.10.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:685b67d99b945b0c221be0780c336b303a7753b3e0ec0d618c795aada25d5e7a", size = 250230, upload-time = "2025-08-17T00:24:59.293Z" }, + { url = "https://files.pythonhosted.org/packages/bc/93/3b24f1da3e0286a4dc5832427e1d448d5296f8287464b1ff4a222abeeeb5/coverage-7.10.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0c079027e50c2ae44da51c2e294596cbc9dbb58f7ca45b30651c7e411060fc23", size = 248351, upload-time = "2025-08-17T00:25:00.676Z" }, + { url = "https://files.pythonhosted.org/packages/de/5f/d59412f869e49dcc5b89398ef3146c8bfaec870b179cc344d27932e0554b/coverage-7.10.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3749aa72b93ce516f77cf5034d8e3c0dfd45c6e8a163a602ede2dc5f9a0bb927", size = 249788, upload-time = "2025-08-17T00:25:02.354Z" }, + { url = "https://files.pythonhosted.org/packages/cc/52/04a3b733f40a0cc7c4a5b9b010844111dbf906df3e868b13e1ce7b39ac31/coverage-7.10.4-cp312-cp312-win32.whl", hash = "sha256:fecb97b3a52fa9bcd5a7375e72fae209088faf671d39fae67261f37772d5559a", size = 219131, upload-time = "2025-08-17T00:25:03.79Z" }, + { url = "https://files.pythonhosted.org/packages/83/dd/12909fc0b83888197b3ec43a4ac7753589591c08d00d9deda4158df2734e/coverage-7.10.4-cp312-cp312-win_amd64.whl", hash = "sha256:26de58f355626628a21fe6a70e1e1fad95702dafebfb0685280962ae1449f17b", size = 219939, upload-time = "2025-08-17T00:25:05.494Z" }, + { url = "https://files.pythonhosted.org/packages/83/c7/058bb3220fdd6821bada9685eadac2940429ab3c97025ce53549ff423cc1/coverage-7.10.4-cp312-cp312-win_arm64.whl", hash = "sha256:67e8885408f8325198862bc487038a4980c9277d753cb8812510927f2176437a", size = 218572, upload-time = "2025-08-17T00:25:06.897Z" }, + { url = "https://files.pythonhosted.org/packages/46/b0/4a3662de81f2ed792a4e425d59c4ae50d8dd1d844de252838c200beed65a/coverage-7.10.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2b8e1d2015d5dfdbf964ecef12944c0c8c55b885bb5c0467ae8ef55e0e151233", size = 216735, upload-time = "2025-08-17T00:25:08.617Z" }, + { url = "https://files.pythonhosted.org/packages/c5/e8/e2dcffea01921bfffc6170fb4406cffb763a3b43a047bbd7923566708193/coverage-7.10.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:25735c299439018d66eb2dccf54f625aceb78645687a05f9f848f6e6c751e169", size = 216982, upload-time = "2025-08-17T00:25:10.384Z" }, + { url = "https://files.pythonhosted.org/packages/9d/59/cc89bb6ac869704d2781c2f5f7957d07097c77da0e8fdd4fd50dbf2ac9c0/coverage-7.10.4-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:715c06cb5eceac4d9b7cdf783ce04aa495f6aff657543fea75c30215b28ddb74", size = 247981, upload-time = "2025-08-17T00:25:11.854Z" }, + { url = "https://files.pythonhosted.org/packages/aa/23/3da089aa177ceaf0d3f96754ebc1318597822e6387560914cc480086e730/coverage-7.10.4-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e017ac69fac9aacd7df6dc464c05833e834dc5b00c914d7af9a5249fcccf07ef", size = 250584, upload-time = "2025-08-17T00:25:13.483Z" }, + { url = "https://files.pythonhosted.org/packages/ad/82/e8693c368535b4e5fad05252a366a1794d481c79ae0333ed943472fd778d/coverage-7.10.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bad180cc40b3fccb0f0e8c702d781492654ac2580d468e3ffc8065e38c6c2408", size = 251856, upload-time = 
"2025-08-17T00:25:15.27Z" }, + { url = "https://files.pythonhosted.org/packages/56/19/8b9cb13292e602fa4135b10a26ac4ce169a7fc7c285ff08bedd42ff6acca/coverage-7.10.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:becbdcd14f685fada010a5f792bf0895675ecf7481304fe159f0cd3f289550bd", size = 250015, upload-time = "2025-08-17T00:25:16.759Z" }, + { url = "https://files.pythonhosted.org/packages/10/e7/e5903990ce089527cf1c4f88b702985bd65c61ac245923f1ff1257dbcc02/coverage-7.10.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0b485ca21e16a76f68060911f97ebbe3e0d891da1dbbce6af7ca1ab3f98b9097", size = 247908, upload-time = "2025-08-17T00:25:18.232Z" }, + { url = "https://files.pythonhosted.org/packages/dd/c9/7d464f116df1df7fe340669af1ddbe1a371fc60f3082ff3dc837c4f1f2ab/coverage-7.10.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6c1d098ccfe8e1e0a1ed9a0249138899948afd2978cbf48eb1cc3fcd38469690", size = 249525, upload-time = "2025-08-17T00:25:20.141Z" }, + { url = "https://files.pythonhosted.org/packages/ce/42/722e0cdbf6c19e7235c2020837d4e00f3b07820fd012201a983238cc3a30/coverage-7.10.4-cp313-cp313-win32.whl", hash = "sha256:8630f8af2ca84b5c367c3df907b1706621abe06d6929f5045fd628968d421e6e", size = 219173, upload-time = "2025-08-17T00:25:21.56Z" }, + { url = "https://files.pythonhosted.org/packages/97/7e/aa70366f8275955cd51fa1ed52a521c7fcebcc0fc279f53c8c1ee6006dfe/coverage-7.10.4-cp313-cp313-win_amd64.whl", hash = "sha256:f68835d31c421736be367d32f179e14ca932978293fe1b4c7a6a49b555dff5b2", size = 219969, upload-time = "2025-08-17T00:25:23.501Z" }, + { url = "https://files.pythonhosted.org/packages/ac/96/c39d92d5aad8fec28d4606556bfc92b6fee0ab51e4a548d9b49fb15a777c/coverage-7.10.4-cp313-cp313-win_arm64.whl", hash = "sha256:6eaa61ff6724ca7ebc5326d1fae062d85e19b38dd922d50903702e6078370ae7", size = 218601, upload-time = "2025-08-17T00:25:25.295Z" }, + { url = "https://files.pythonhosted.org/packages/79/13/34d549a6177bd80fa5db758cb6fd3057b7ad9296d8707d4ab7f480b0135f/coverage-7.10.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:702978108876bfb3d997604930b05fe769462cc3000150b0e607b7b444f2fd84", size = 217445, upload-time = "2025-08-17T00:25:27.129Z" }, + { url = "https://files.pythonhosted.org/packages/6a/c0/433da866359bf39bf595f46d134ff2d6b4293aeea7f3328b6898733b0633/coverage-7.10.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e8f978e8c5521d9c8f2086ac60d931d583fab0a16f382f6eb89453fe998e2484", size = 217676, upload-time = "2025-08-17T00:25:28.641Z" }, + { url = "https://files.pythonhosted.org/packages/7e/d7/2b99aa8737f7801fd95222c79a4ebc8c5dd4460d4bed7ef26b17a60c8d74/coverage-7.10.4-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:df0ac2ccfd19351411c45e43ab60932b74472e4648b0a9edf6a3b58846e246a9", size = 259002, upload-time = "2025-08-17T00:25:30.065Z" }, + { url = "https://files.pythonhosted.org/packages/08/cf/86432b69d57debaef5abf19aae661ba8f4fcd2882fa762e14added4bd334/coverage-7.10.4-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:73a0d1aaaa3796179f336448e1576a3de6fc95ff4f07c2d7251d4caf5d18cf8d", size = 261178, upload-time = "2025-08-17T00:25:31.517Z" }, + { url = "https://files.pythonhosted.org/packages/23/78/85176593f4aa6e869cbed7a8098da3448a50e3fac5cb2ecba57729a5220d/coverage-7.10.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:873da6d0ed6b3ffc0bc01f2c7e3ad7e2023751c0d8d86c26fe7322c314b031dc", size = 263402, upload-time = 
"2025-08-17T00:25:33.339Z" }, + { url = "https://files.pythonhosted.org/packages/88/1d/57a27b6789b79abcac0cc5805b31320d7a97fa20f728a6a7c562db9a3733/coverage-7.10.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c6446c75b0e7dda5daa876a1c87b480b2b52affb972fedd6c22edf1aaf2e00ec", size = 260957, upload-time = "2025-08-17T00:25:34.795Z" }, + { url = "https://files.pythonhosted.org/packages/fa/e5/3e5ddfd42835c6def6cd5b2bdb3348da2e34c08d9c1211e91a49e9fd709d/coverage-7.10.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:6e73933e296634e520390c44758d553d3b573b321608118363e52113790633b9", size = 258718, upload-time = "2025-08-17T00:25:36.259Z" }, + { url = "https://files.pythonhosted.org/packages/1a/0b/d364f0f7ef111615dc4e05a6ed02cac7b6f2ac169884aa57faeae9eb5fa0/coverage-7.10.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:52073d4b08d2cb571234c8a71eb32af3c6923149cf644a51d5957ac128cf6aa4", size = 259848, upload-time = "2025-08-17T00:25:37.754Z" }, + { url = "https://files.pythonhosted.org/packages/10/c6/bbea60a3b309621162e53faf7fac740daaf083048ea22077418e1ecaba3f/coverage-7.10.4-cp313-cp313t-win32.whl", hash = "sha256:e24afb178f21f9ceb1aefbc73eb524769aa9b504a42b26857243f881af56880c", size = 219833, upload-time = "2025-08-17T00:25:39.252Z" }, + { url = "https://files.pythonhosted.org/packages/44/a5/f9f080d49cfb117ddffe672f21eab41bd23a46179a907820743afac7c021/coverage-7.10.4-cp313-cp313t-win_amd64.whl", hash = "sha256:be04507ff1ad206f4be3d156a674e3fb84bbb751ea1b23b142979ac9eebaa15f", size = 220897, upload-time = "2025-08-17T00:25:40.772Z" }, + { url = "https://files.pythonhosted.org/packages/46/89/49a3fc784fa73d707f603e586d84a18c2e7796707044e9d73d13260930b7/coverage-7.10.4-cp313-cp313t-win_arm64.whl", hash = "sha256:f3e3ff3f69d02b5dad67a6eac68cc9c71ae343b6328aae96e914f9f2f23a22e2", size = 219160, upload-time = "2025-08-17T00:25:42.229Z" }, + { url = "https://files.pythonhosted.org/packages/b5/22/525f84b4cbcff66024d29f6909d7ecde97223f998116d3677cfba0d115b5/coverage-7.10.4-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a59fe0af7dd7211ba595cf7e2867458381f7e5d7b4cffe46274e0b2f5b9f4eb4", size = 216717, upload-time = "2025-08-17T00:25:43.875Z" }, + { url = "https://files.pythonhosted.org/packages/a6/58/213577f77efe44333a416d4bcb251471e7f64b19b5886bb515561b5ce389/coverage-7.10.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:3a6c35c5b70f569ee38dc3350cd14fdd0347a8b389a18bb37538cc43e6f730e6", size = 216994, upload-time = "2025-08-17T00:25:45.405Z" }, + { url = "https://files.pythonhosted.org/packages/17/85/34ac02d0985a09472f41b609a1d7babc32df87c726c7612dc93d30679b5a/coverage-7.10.4-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:acb7baf49f513554c4af6ef8e2bd6e8ac74e6ea0c7386df8b3eb586d82ccccc4", size = 248038, upload-time = "2025-08-17T00:25:46.981Z" }, + { url = "https://files.pythonhosted.org/packages/47/4f/2140305ec93642fdaf988f139813629cbb6d8efa661b30a04b6f7c67c31e/coverage-7.10.4-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a89afecec1ed12ac13ed203238b560cbfad3522bae37d91c102e690b8b1dc46c", size = 250575, upload-time = "2025-08-17T00:25:48.613Z" }, + { url = "https://files.pythonhosted.org/packages/f2/b5/41b5784180b82a083c76aeba8f2c72ea1cb789e5382157b7dc852832aea2/coverage-7.10.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:480442727f464407d8ade6e677b7f21f3b96a9838ab541b9a28ce9e44123c14e", size = 251927, upload-time = 
"2025-08-17T00:25:50.881Z" }, + { url = "https://files.pythonhosted.org/packages/78/ca/c1dd063e50b71f5aea2ebb27a1c404e7b5ecf5714c8b5301f20e4e8831ac/coverage-7.10.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a89bf193707f4a17f1ed461504031074d87f035153239f16ce86dfb8f8c7ac76", size = 249930, upload-time = "2025-08-17T00:25:52.422Z" }, + { url = "https://files.pythonhosted.org/packages/8d/66/d8907408612ffee100d731798e6090aedb3ba766ecf929df296c1a7ee4fb/coverage-7.10.4-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:3ddd912c2fc440f0fb3229e764feec85669d5d80a988ff1b336a27d73f63c818", size = 247862, upload-time = "2025-08-17T00:25:54.316Z" }, + { url = "https://files.pythonhosted.org/packages/29/db/53cd8ec8b1c9c52d8e22a25434785bfc2d1e70c0cfb4d278a1326c87f741/coverage-7.10.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8a538944ee3a42265e61c7298aeba9ea43f31c01271cf028f437a7b4075592cf", size = 249360, upload-time = "2025-08-17T00:25:55.833Z" }, + { url = "https://files.pythonhosted.org/packages/4f/75/5ec0a28ae4a0804124ea5a5becd2b0fa3adf30967ac656711fb5cdf67c60/coverage-7.10.4-cp314-cp314-win32.whl", hash = "sha256:fd2e6002be1c62476eb862b8514b1ba7e7684c50165f2a8d389e77da6c9a2ebd", size = 219449, upload-time = "2025-08-17T00:25:57.984Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ab/66e2ee085ec60672bf5250f11101ad8143b81f24989e8c0e575d16bb1e53/coverage-7.10.4-cp314-cp314-win_amd64.whl", hash = "sha256:ec113277f2b5cf188d95fb66a65c7431f2b9192ee7e6ec9b72b30bbfb53c244a", size = 220246, upload-time = "2025-08-17T00:25:59.868Z" }, + { url = "https://files.pythonhosted.org/packages/37/3b/00b448d385f149143190846217797d730b973c3c0ec2045a7e0f5db3a7d0/coverage-7.10.4-cp314-cp314-win_arm64.whl", hash = "sha256:9744954bfd387796c6a091b50d55ca7cac3d08767795b5eec69ad0f7dbf12d38", size = 218825, upload-time = "2025-08-17T00:26:01.44Z" }, + { url = "https://files.pythonhosted.org/packages/ee/2e/55e20d3d1ce00b513efb6fd35f13899e1c6d4f76c6cbcc9851c7227cd469/coverage-7.10.4-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:5af4829904dda6aabb54a23879f0f4412094ba9ef153aaa464e3c1b1c9bc98e6", size = 217462, upload-time = "2025-08-17T00:26:03.014Z" }, + { url = "https://files.pythonhosted.org/packages/47/b3/aab1260df5876f5921e2c57519e73a6f6eeacc0ae451e109d44ee747563e/coverage-7.10.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7bba5ed85e034831fac761ae506c0644d24fd5594727e174b5a73aff343a7508", size = 217675, upload-time = "2025-08-17T00:26:04.606Z" }, + { url = "https://files.pythonhosted.org/packages/67/23/1cfe2aa50c7026180989f0bfc242168ac7c8399ccc66eb816b171e0ab05e/coverage-7.10.4-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d57d555b0719834b55ad35045de6cc80fc2b28e05adb6b03c98479f9553b387f", size = 259176, upload-time = "2025-08-17T00:26:06.159Z" }, + { url = "https://files.pythonhosted.org/packages/9d/72/5882b6aeed3f9de7fc4049874fd7d24213bf1d06882f5c754c8a682606ec/coverage-7.10.4-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ba62c51a72048bb1ea72db265e6bd8beaabf9809cd2125bbb5306c6ce105f214", size = 261341, upload-time = "2025-08-17T00:26:08.137Z" }, + { url = "https://files.pythonhosted.org/packages/1b/70/a0c76e3087596ae155f8e71a49c2c534c58b92aeacaf4d9d0cbbf2dde53b/coverage-7.10.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0acf0c62a6095f07e9db4ec365cc58c0ef5babb757e54745a1aa2ea2a2564af1", size = 263600, upload-time = 
"2025-08-17T00:26:11.045Z" }, + { url = "https://files.pythonhosted.org/packages/cb/5f/27e4cd4505b9a3c05257fb7fc509acbc778c830c450cb4ace00bf2b7bda7/coverage-7.10.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e1033bf0f763f5cf49ffe6594314b11027dcc1073ac590b415ea93463466deec", size = 261036, upload-time = "2025-08-17T00:26:12.693Z" }, + { url = "https://files.pythonhosted.org/packages/02/d6/cf2ae3a7f90ab226ea765a104c4e76c5126f73c93a92eaea41e1dc6a1892/coverage-7.10.4-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:92c29eff894832b6a40da1789b1f252305af921750b03ee4535919db9179453d", size = 258794, upload-time = "2025-08-17T00:26:14.261Z" }, + { url = "https://files.pythonhosted.org/packages/9e/b1/39f222eab0d78aa2001cdb7852aa1140bba632db23a5cfd832218b496d6c/coverage-7.10.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:822c4c830989c2093527e92acd97be4638a44eb042b1bdc0e7a278d84a070bd3", size = 259946, upload-time = "2025-08-17T00:26:15.899Z" }, + { url = "https://files.pythonhosted.org/packages/74/b2/49d82acefe2fe7c777436a3097f928c7242a842538b190f66aac01f29321/coverage-7.10.4-cp314-cp314t-win32.whl", hash = "sha256:e694d855dac2e7cf194ba33653e4ba7aad7267a802a7b3fc4347d0517d5d65cd", size = 220226, upload-time = "2025-08-17T00:26:17.566Z" }, + { url = "https://files.pythonhosted.org/packages/06/b0/afb942b6b2fc30bdbc7b05b087beae11c2b0daaa08e160586cf012b6ad70/coverage-7.10.4-cp314-cp314t-win_amd64.whl", hash = "sha256:efcc54b38ef7d5bfa98050f220b415bc5bb3d432bd6350a861cf6da0ede2cdcd", size = 221346, upload-time = "2025-08-17T00:26:19.311Z" }, + { url = "https://files.pythonhosted.org/packages/d8/66/e0531c9d1525cb6eac5b5733c76f27f3053ee92665f83f8899516fea6e76/coverage-7.10.4-cp314-cp314t-win_arm64.whl", hash = "sha256:6f3a3496c0fa26bfac4ebc458747b778cff201c8ae94fa05e1391bab0dbc473c", size = 219368, upload-time = "2025-08-17T00:26:21.011Z" }, + { url = "https://files.pythonhosted.org/packages/d1/61/4e38d86d31a268778d69bb3fd1fc88e0c7a78ffdee48f2b5d9e028a3dce5/coverage-7.10.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:48fd4d52600c2a9d5622e52dfae674a7845c5e1dceaf68b88c99feb511fbcfd6", size = 216393, upload-time = "2025-08-17T00:26:22.648Z" }, + { url = "https://files.pythonhosted.org/packages/17/16/5c2fdb1d213f57e0ff107738397aff68582fa90a6575ca165b49eae5a809/coverage-7.10.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:56217b470d09d69e6b7dcae38200f95e389a77db801cb129101697a4553b18b6", size = 216779, upload-time = "2025-08-17T00:26:24.422Z" }, + { url = "https://files.pythonhosted.org/packages/26/99/3aca6b4028e3667ccfbaef9cfd9dca8d85eb14deee7868373cc48cbee553/coverage-7.10.4-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:44ac3f21a6e28c5ff7f7a47bca5f87885f6a1e623e637899125ba47acd87334d", size = 243214, upload-time = "2025-08-17T00:26:26.468Z" }, + { url = "https://files.pythonhosted.org/packages/0d/33/27a7d2557f85001b2edb6a2f14037851f87ca7d69a4ca79460e1859f4c7f/coverage-7.10.4-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3387739d72c84d17b4d2f7348749cac2e6700e7152026912b60998ee9a40066b", size = 245037, upload-time = "2025-08-17T00:26:28.071Z" }, + { url = "https://files.pythonhosted.org/packages/6d/68/92c0e18d36d34c774cb5053c9413188c27f8b3f9587e315193a30c1695ce/coverage-7.10.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3f111ff20d9a6348e0125be892608e33408dd268f73b020940dfa8511ad05503", size = 246809, upload-time = "2025-08-17T00:26:29.828Z" 
}, + { url = "https://files.pythonhosted.org/packages/03/22/f0618594010903401e782459c100755af3f275ea86d49b0d4f3afa3658d9/coverage-7.10.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:01a852f0a9859734b018a3f483cc962d0b381d48d350b1a0c47d618c73a0c398", size = 244695, upload-time = "2025-08-17T00:26:31.495Z" }, + { url = "https://files.pythonhosted.org/packages/e5/45/e704923a037a4a38a3c13ae6405c31236db2d274307ab28fd1a23b961cad/coverage-7.10.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:225111dd06759ba4e37cee4c0b4f3df2b15c879e9e3c37bf986389300b9917c3", size = 242766, upload-time = "2025-08-17T00:26:33.425Z" }, + { url = "https://files.pythonhosted.org/packages/e4/b7/4dc6f2b41aa907ae330ed841deb49c9487f6ec5072a577fc3a3b284fe855/coverage-7.10.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2178d4183bd1ba608f0bb12e71e55838ba1b7dbb730264f8b08de9f8ef0c27d0", size = 243723, upload-time = "2025-08-17T00:26:35.688Z" }, + { url = "https://files.pythonhosted.org/packages/b7/62/0e58abc2ce2d9a5b906dd1c08802864f756365843c413aebf0184148ddfb/coverage-7.10.4-cp39-cp39-win32.whl", hash = "sha256:93d175fe81913aee7a6ea430abbdf2a79f1d9fd451610e12e334e4fe3264f563", size = 218927, upload-time = "2025-08-17T00:26:37.725Z" }, + { url = "https://files.pythonhosted.org/packages/a4/3e/4668a5b5601450d9c8aa71cc4f7e6c6c259350e577c758b894443598322a/coverage-7.10.4-cp39-cp39-win_amd64.whl", hash = "sha256:2221a823404bb941c7721cf0ef55ac6ee5c25d905beb60c0bba5e5e85415d353", size = 219838, upload-time = "2025-08-17T00:26:39.786Z" }, + { url = "https://files.pythonhosted.org/packages/bb/78/983efd23200921d9edb6bd40512e1aa04af553d7d5a171e50f9b2b45d109/coverage-7.10.4-py3-none-any.whl", hash = "sha256:065d75447228d05121e5c938ca8f0e91eed60a1eb2d1258d42d5084fecfc3302", size = 208365, upload-time = "2025-08-17T00:26:41.479Z" }, ] [package.optional-dependencies] @@ -1347,11 +1347,11 @@ wheels = [ [[package]] name = "filelock" -version = "3.18.0" +version = "3.19.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", size = 18075, upload-time = "2025-03-14T07:11:40.47Z" } +sdist = { url = "https://files.pythonhosted.org/packages/40/bb/0ab3e58d22305b6f5440629d20683af28959bf793d98d11950e305c1c326/filelock-3.19.1.tar.gz", hash = "sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58", size = 17687, upload-time = "2025-08-14T16:56:03.016Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215, upload-time = "2025-03-14T07:11:39.145Z" }, + { url = "https://files.pythonhosted.org/packages/42/14/42b2651a2f46b022ccd948bca9f2d5af0fd8929c4eec235b8d6d844fbe67/filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d", size = 15988, upload-time = "2025-08-14T16:56:01.633Z" }, ] [[package]] @@ -3114,29 +3114,29 @@ wheels = [ [[package]] name = "polars" -version = "1.32.2" +version = "1.32.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8a/07/7864688644139b1bc198c36e34e81581ec76c8d20ae57c4879914786b2a1/polars-1.32.2.tar.gz", hash = 
"sha256:b4c5cefc7cf7a2461f8800cf2c09976c47cb1fd959c6ef3024d5618b497f05d3", size = 4788876, upload-time = "2025-08-07T10:51:16.587Z" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/f2/1a76a8bd902bc4942e435a480f362c8687bba60d438ff3283191e38568fa/polars-1.32.3.tar.gz", hash = "sha256:57c500dc1b5cba49b0589034478db031815f3d57a20cb830b05ecee1a9ba56b1", size = 4838448, upload-time = "2025-08-14T17:28:10.702Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f5/61/b251ce6755d0d3c6f4c8cb245941b80901624fce7efeb95b37c170da2565/polars-1.32.2-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:f21da6a5210898ec800b7e9e667fb53eb9161b7ceb812ee6555ff5661a00e517", size = 37770199, upload-time = "2025-08-07T10:50:05.627Z" }, - { url = "https://files.pythonhosted.org/packages/22/74/ea073a88073cd6025b12850484f51d30dad695b51432a3f0a0439e2f8094/polars-1.32.2-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:d3f4e061312ef6c2a907378ce407a6132734fe1a13f261a1984a1a9ca2f6febc", size = 34461798, upload-time = "2025-08-07T10:50:10.452Z" }, - { url = "https://files.pythonhosted.org/packages/ec/14/ee34ebe3eb842c83ca1d2d3af6ee02b08377e056ffad156c9a2b15a6d05c/polars-1.32.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a711a750cfc19f1f883d2b46895dd698abf4d446ca41c3bf510ced0ff1178057", size = 38280883, upload-time = "2025-08-07T10:50:14.168Z" }, - { url = "https://files.pythonhosted.org/packages/af/ed/0f5ab777b0dc95936074266bc6fd9cf117886ebdd6685589437edfdbb186/polars-1.32.2-cp39-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:d1c53a828eedc215fb0dabc7cef02c6f4ad042157512ddb99840fd42b8da1e8a", size = 35490843, upload-time = "2025-08-07T10:50:18.325Z" }, - { url = "https://files.pythonhosted.org/packages/91/58/d2840554ef1c69e06a28ee928bdaa0d6a61af12205ff24c096628f217f99/polars-1.32.2-cp39-abi3-win_amd64.whl", hash = "sha256:5e1660a584e89e1d60cd89984feca38a695e491a966581fefe8be99c230ea154", size = 37897347, upload-time = "2025-08-07T10:50:22.247Z" }, - { url = "https://files.pythonhosted.org/packages/b8/53/4eaaa4f219add46594db21a05a9a5629ec6af20bd859a90668d5a1448abc/polars-1.32.2-cp39-abi3-win_arm64.whl", hash = "sha256:cd390364f6f3927474bd0aed255103195b9d2b3eef0f0c5bb429db5e6311615e", size = 34059100, upload-time = "2025-08-07T10:50:26.445Z" }, + { url = "https://files.pythonhosted.org/packages/4c/9b/5937ab9f8fa49c8e00617aeb817a5ffa5740434d5bb8a90f2afa657875aa/polars-1.32.3-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:c7c472ea1d50a5104079cb64e34f78f85774bcc69b875ba8daf21233f4c70d42", size = 37935794, upload-time = "2025-08-14T17:26:55.565Z" }, + { url = "https://files.pythonhosted.org/packages/6e/e9/88f5332001b9dd5c8e0a4fab51015f740e01715a081c41bc0f7ad2bf76a5/polars-1.32.3-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:fd87275f0cc795e72a2030b58293198cfa748d4b009cf52218e27db5397ed07f", size = 34621102, upload-time = "2025-08-14T17:27:00.521Z" }, + { url = "https://files.pythonhosted.org/packages/ab/8a/6f56af7e535c34c95decc8654786bfce4632ba32817dc2f8bad18571ef9a/polars-1.32.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9a9b9668ef310e5a77a7e7daa9c753874779c8da52e93f654bfd7953eb4b60b", size = 38443071, upload-time = "2025-08-14T17:27:08.382Z" }, + { url = "https://files.pythonhosted.org/packages/46/aa/63536ea5780edc0ef6850679dc81d519f3966c7bb11a5cf10ccecb541095/polars-1.32.3-cp39-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:c8f5d2f43b80b68e39bfaa2948ce632563633466576f12e74e8560d6481f5851", size = 35639598, upload-time = 
"2025-08-14T17:27:12.261Z" }, + { url = "https://files.pythonhosted.org/packages/d7/c8/226953cda6cf9ae63aa9714d396a9138029e31db3c504c15d6711b618f8f/polars-1.32.3-cp39-abi3-win_amd64.whl", hash = "sha256:db56a7cb4898e173d62634e182f74bdff744c62be5470e0fe20df8d10f659af7", size = 38038192, upload-time = "2025-08-14T17:27:15.993Z" }, + { url = "https://files.pythonhosted.org/packages/ec/99/6b93c854e602927a778eabd7550204f700cc4e6c07be73372371583dda3e/polars-1.32.3-cp39-abi3-win_arm64.whl", hash = "sha256:a2e3f87c60f54eefe67b1bebd3105918d84df0fd6d59cc6b870c2f16d2d26ca1", size = 34198919, upload-time = "2025-08-14T17:27:21.423Z" }, ] [[package]] name = "polyfactory" -version = "2.22.1" +version = "2.22.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "faker" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ea/21/0b1a08bd4ab7d4685b4b83969a7b93a11c6557955765ada1b28d362d177c/polyfactory-2.22.1.tar.gz", hash = "sha256:6c91693088c81ab8fbe22dc66cae21fd3c17f91930fe1fae5b35b030eb020d3a", size = 253987, upload-time = "2025-07-14T19:37:29.032Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4e/a6/950d13856d995705df33b92451559fd317207a9c43629ab1771135a0c966/polyfactory-2.22.2.tar.gz", hash = "sha256:a3297aa0b004f2b26341e903795565ae88507c4d86e68b132c2622969028587a", size = 254462, upload-time = "2025-08-15T06:23:21.28Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/12/8d/245e02e6ff1f046f70636dc62380ea73b03ab1d7f1fdcf61cbe26bc9c030/polyfactory-2.22.1-py3-none-any.whl", hash = "sha256:7500ee3678d9bc25347c0a73a35d3711cfcf9c7f45ad56d0bb085e9f75ecae7a", size = 63547, upload-time = "2025-07-14T19:37:27.353Z" }, + { url = "https://files.pythonhosted.org/packages/e7/fe/d52c90e07c458f38b26f9972a25cb011b2744813f76fcd6121dde64744fa/polyfactory-2.22.2-py3-none-any.whl", hash = "sha256:9bea58ac9a80375b4153cd60820f75e558b863e567e058794d28c6a52b84118a", size = 63715, upload-time = "2025-08-15T06:23:19.664Z" }, ] [[package]] @@ -3295,116 +3295,116 @@ wheels = [ [[package]] name = "protobuf" -version = "6.31.1" +version = "6.32.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/52/f3/b9655a711b32c19720253f6f06326faf90580834e2e83f840472d752bc8b/protobuf-6.31.1.tar.gz", hash = "sha256:d8cac4c982f0b957a4dc73a80e2ea24fab08e679c0de9deb835f4a12d69aca9a", size = 441797, upload-time = "2025-05-28T19:25:54.947Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c0/df/fb4a8eeea482eca989b51cffd274aac2ee24e825f0bf3cbce5281fa1567b/protobuf-6.32.0.tar.gz", hash = "sha256:a81439049127067fc49ec1d36e25c6ee1d1a2b7be930675f919258d03c04e7d2", size = 440614, upload-time = "2025-08-14T21:21:25.015Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/6f/6ab8e4bf962fd5570d3deaa2d5c38f0a363f57b4501047b5ebeb83ab1125/protobuf-6.31.1-cp310-abi3-win32.whl", hash = "sha256:7fa17d5a29c2e04b7d90e5e32388b8bfd0e7107cd8e616feef7ed3fa6bdab5c9", size = 423603, upload-time = "2025-05-28T19:25:41.198Z" }, - { url = "https://files.pythonhosted.org/packages/44/3a/b15c4347dd4bf3a1b0ee882f384623e2063bb5cf9fa9d57990a4f7df2fb6/protobuf-6.31.1-cp310-abi3-win_amd64.whl", hash = "sha256:426f59d2964864a1a366254fa703b8632dcec0790d8862d30034d8245e1cd447", size = 435283, upload-time = "2025-05-28T19:25:44.275Z" }, - { url = "https://files.pythonhosted.org/packages/6a/c9/b9689a2a250264a84e66c46d8862ba788ee7a641cdca39bccf64f59284b7/protobuf-6.31.1-cp39-abi3-macosx_10_9_universal2.whl", 
hash = "sha256:6f1227473dc43d44ed644425268eb7c2e488ae245d51c6866d19fe158e207402", size = 425604, upload-time = "2025-05-28T19:25:45.702Z" }, - { url = "https://files.pythonhosted.org/packages/76/a1/7a5a94032c83375e4fe7e7f56e3976ea6ac90c5e85fac8576409e25c39c3/protobuf-6.31.1-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:a40fc12b84c154884d7d4c4ebd675d5b3b5283e155f324049ae396b95ddebc39", size = 322115, upload-time = "2025-05-28T19:25:47.128Z" }, - { url = "https://files.pythonhosted.org/packages/fa/b1/b59d405d64d31999244643d88c45c8241c58f17cc887e73bcb90602327f8/protobuf-6.31.1-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:4ee898bf66f7a8b0bd21bce523814e6fbd8c6add948045ce958b73af7e8878c6", size = 321070, upload-time = "2025-05-28T19:25:50.036Z" }, - { url = "https://files.pythonhosted.org/packages/b1/f0/4160dbd205eee8fdf8647d154e7ceaa9d25b3a877b6311274eb6dc896b75/protobuf-6.31.1-cp39-cp39-win32.whl", hash = "sha256:0414e3aa5a5f3ff423828e1e6a6e907d6c65c1d5b7e6e975793d5590bdeecc16", size = 423626, upload-time = "2025-05-28T19:25:51.355Z" }, - { url = "https://files.pythonhosted.org/packages/09/34/13989eb9f482409ed821bfa3e34e6a3878b42607c38e7f7572b4cc825091/protobuf-6.31.1-cp39-cp39-win_amd64.whl", hash = "sha256:8764cf4587791e7564051b35524b72844f845ad0bb011704c3736cce762d8fe9", size = 435347, upload-time = "2025-05-28T19:25:52.932Z" }, - { url = "https://files.pythonhosted.org/packages/f7/af/ab3c51ab7507a7325e98ffe691d9495ee3d3aa5f589afad65ec920d39821/protobuf-6.31.1-py3-none-any.whl", hash = "sha256:720a6c7e6b77288b85063569baae8536671b39f15cc22037ec7045658d80489e", size = 168724, upload-time = "2025-05-28T19:25:53.926Z" }, + { url = "https://files.pythonhosted.org/packages/33/18/df8c87da2e47f4f1dcc5153a81cd6bca4e429803f4069a299e236e4dd510/protobuf-6.32.0-cp310-abi3-win32.whl", hash = "sha256:84f9e3c1ff6fb0308dbacb0950d8aa90694b0d0ee68e75719cb044b7078fe741", size = 424409, upload-time = "2025-08-14T21:21:12.366Z" }, + { url = "https://files.pythonhosted.org/packages/e1/59/0a820b7310f8139bd8d5a9388e6a38e1786d179d6f33998448609296c229/protobuf-6.32.0-cp310-abi3-win_amd64.whl", hash = "sha256:a8bdbb2f009cfc22a36d031f22a625a38b615b5e19e558a7b756b3279723e68e", size = 435735, upload-time = "2025-08-14T21:21:15.046Z" }, + { url = "https://files.pythonhosted.org/packages/cc/5b/0d421533c59c789e9c9894683efac582c06246bf24bb26b753b149bd88e4/protobuf-6.32.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d52691e5bee6c860fff9a1c86ad26a13afbeb4b168cd4445c922b7e2cf85aaf0", size = 426449, upload-time = "2025-08-14T21:21:16.687Z" }, + { url = "https://files.pythonhosted.org/packages/ec/7b/607764ebe6c7a23dcee06e054fd1de3d5841b7648a90fd6def9a3bb58c5e/protobuf-6.32.0-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:501fe6372fd1c8ea2a30b4d9be8f87955a64d6be9c88a973996cef5ef6f0abf1", size = 322869, upload-time = "2025-08-14T21:21:18.282Z" }, + { url = "https://files.pythonhosted.org/packages/40/01/2e730bd1c25392fc32e3268e02446f0d77cb51a2c3a8486b1798e34d5805/protobuf-6.32.0-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:75a2aab2bd1aeb1f5dc7c5f33bcb11d82ea8c055c9becbb41c26a8c43fd7092c", size = 322009, upload-time = "2025-08-14T21:21:19.893Z" }, + { url = "https://files.pythonhosted.org/packages/84/9c/244509764dc78d69e4a72bfe81b00f2691bdfcaffdb591a3e158695096d7/protobuf-6.32.0-cp39-cp39-win32.whl", hash = "sha256:7db8ed09024f115ac877a1427557b838705359f047b2ff2f2b2364892d19dacb", size = 424503, upload-time = "2025-08-14T21:21:21.328Z" }, + { url = 
"https://files.pythonhosted.org/packages/9b/6f/b1d90a22f619808cf6337aede0d6730af1849330f8dc4d434cfc4a8831b4/protobuf-6.32.0-cp39-cp39-win_amd64.whl", hash = "sha256:15eba1b86f193a407607112ceb9ea0ba9569aed24f93333fe9a497cf2fda37d3", size = 435822, upload-time = "2025-08-14T21:21:22.495Z" }, + { url = "https://files.pythonhosted.org/packages/9c/f2/80ffc4677aac1bc3519b26bc7f7f5de7fce0ee2f7e36e59e27d8beb32dd1/protobuf-6.32.0-py3-none-any.whl", hash = "sha256:ba377e5b67b908c8f3072a57b63e2c6a4cbd18aea4ed98d2584350dbf46f2783", size = 169287, upload-time = "2025-08-14T21:21:23.515Z" }, ] [[package]] name = "psqlpy" -version = "0.11.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/8e/40babfa88d73591286182d9eda5511589994233adda9ffb5bd4c3c7652b6/psqlpy-0.11.4.tar.gz", hash = "sha256:5165e8b69f640dbbeeefaea58585b6d7b462bc9e90de0828c04a0521082a18a9", size = 290188, upload-time = "2025-08-14T09:12:10.675Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a5/96/7982dff18b953c1b27fd1092d1eba035dbc6ef4ab1eb759943db6c5edde9/psqlpy-0.11.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f0009aed360ba3b5c70b93ab264d12ae2f7f46e77cb977ac83741680bd175e3c", size = 4309718, upload-time = "2025-08-14T08:41:29.949Z" }, - { url = "https://files.pythonhosted.org/packages/06/c4/5bd280b4a1f54c72c5f699f5b5d64b94f63481ca71a7ca75e22c885fc304/psqlpy-0.11.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:676664b65e78754b3a72013079a25898631e5fb66bb3eb912eb79ddc27a50eb6", size = 4512969, upload-time = "2025-08-14T08:41:33.906Z" }, - { url = "https://files.pythonhosted.org/packages/4f/b3/bbd91284cd74156cc5773f2befe20ad767700d0d1e063438a205368944fe/psqlpy-0.11.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9584bbcb768e5ae9fcec4f8ef819cd163126e6d0b90a5036e8dbad573a33ba3", size = 5044023, upload-time = "2025-08-14T08:41:36.506Z" }, - { url = "https://files.pythonhosted.org/packages/0f/d6/b3f401f24655e3eaccf1fe980c3f8bc76443a9d6e680cf815ed4f7cb9fb1/psqlpy-0.11.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e12d53ec40d9f1e1363edee5b1e467d3d5646f041afd6747eaa438e6bdb95131", size = 4298505, upload-time = "2025-08-14T08:41:39.027Z" }, - { url = "https://files.pythonhosted.org/packages/36/55/d5c39f0b4774d1dc63f9054a63198fa03a24a443b371fdc31d3be5c5eeb0/psqlpy-0.11.4-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe0755559bebd62d77321bd7496c1be72911d8a8077116763b909ca18aebb1f9", size = 4926981, upload-time = "2025-08-14T08:41:41.621Z" }, - { url = "https://files.pythonhosted.org/packages/dd/e2/8dc80be09255223343916d3d57e71ebcdd2838c38bdc3ae48b14d96e46dc/psqlpy-0.11.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e472c967f750e61351f6fb643a9f19183d9f3e0ffff8a53fc0cd33845a04569", size = 5034500, upload-time = "2025-08-14T08:41:44.069Z" }, - { url = "https://files.pythonhosted.org/packages/1d/2f/22b5a5f36f5ea1c49fd862d2a3936e1c52ebc33cb5e6510bf8345a322c42/psqlpy-0.11.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d457b3c6a50e86e797baef7b97f4ecb3e884db82f5ce202bcf56d4d88b29e6c5", size = 4699619, upload-time = "2025-08-14T08:41:46.69Z" }, - { url = "https://files.pythonhosted.org/packages/d1/af/aa9e8f70f901c85203e2c04be21e65b00d3910441d686734be7ce1c54842/psqlpy-0.11.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73ea116c31d641de3ecad803df13b24cd1eff060d7bbc93e2cfe0477458bc040", 
size = 4927790, upload-time = "2025-08-14T08:41:48.773Z" }, - { url = "https://files.pythonhosted.org/packages/fc/23/80864d2ec1ad0e3f6be988e45e5083edd20f6e299e9db80a2df3428b2b94/psqlpy-0.11.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:19bd4e686b2e9536d65d821429509261eb9a43ce56785fd51d5be1e14cd9f3e2", size = 4955661, upload-time = "2025-08-14T08:41:50.811Z" }, - { url = "https://files.pythonhosted.org/packages/64/e0/c5b05464e0eb914161d384695202bfda133f388d93059439d585dbf945a6/psqlpy-0.11.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d0bc4a0f813622fa6bb3e0ea687e67c746db1abccfa5620c644180e456dc5e2a", size = 5066954, upload-time = "2025-08-14T08:41:54.096Z" }, - { url = "https://files.pythonhosted.org/packages/65/9c/24d76002256ac33b340f89dbbeb91f8a8ac766a7f0836133922a53a56044/psqlpy-0.11.4-cp310-cp310-win32.whl", hash = "sha256:3fa567fe209aaf157047552170bab5a73c94d7f6377113b6dc3bc143e3e02d39", size = 3356380, upload-time = "2025-08-14T08:41:56.055Z" }, - { url = "https://files.pythonhosted.org/packages/72/a9/327bd74604ab69eff5e5a9b2a72143f00700b0491d42f16e7cecfeaa8c3f/psqlpy-0.11.4-cp310-cp310-win_amd64.whl", hash = "sha256:41090e01af00a3931c1ab352dc3e47b488c7987160ec9b19d90dde70b511a20c", size = 3760670, upload-time = "2025-08-14T08:41:58.125Z" }, - { url = "https://files.pythonhosted.org/packages/f7/29/2833de8e1b823cd6710a7235cbf668c76e0245f93277b9a2ad984174c3f9/psqlpy-0.11.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6543270b65201fbc61d4bcb7e14ae8d50b279e134b7c983d01337bd9666ad08c", size = 4309147, upload-time = "2025-08-14T08:42:00.52Z" }, - { url = "https://files.pythonhosted.org/packages/44/62/93fb5fa393a24c3998b1a571ec3a073bcc7050afffc481cdc9e27a62a44a/psqlpy-0.11.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7630bcc949dbf5603f13f49a20d5d817c3fe6404f9930a3113fbae597b1db4c3", size = 4510920, upload-time = "2025-08-14T08:42:02.837Z" }, - { url = "https://files.pythonhosted.org/packages/10/6b/9a583a73e7480198421471d3827d6c0e98629f83dfc82d409d8f952764e9/psqlpy-0.11.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6130442eca0bc43a5e22ccc7158d8675e17e46797ef0195cd7c5c96d653d3b9e", size = 5042446, upload-time = "2025-08-14T08:42:04.897Z" }, - { url = "https://files.pythonhosted.org/packages/06/b7/96e73807ab5045ab66fdf683e887def781be8718252e9529b2004c11a63b/psqlpy-0.11.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:82caf18bfe218540e2f7c2ba1dae9c5d9855ad60de91d863009b663d4b5122c9", size = 4298490, upload-time = "2025-08-14T08:42:07.206Z" }, - { url = "https://files.pythonhosted.org/packages/76/5f/62468c362b8fc7b8f7cd53ea5d67fc3b755e49212540821f8cd73890c1ef/psqlpy-0.11.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0c52c2af7e8908731c5b81c984bdb095c40b44d866687977944cad707eee194", size = 4926630, upload-time = "2025-08-14T08:42:10Z" }, - { url = "https://files.pythonhosted.org/packages/9a/41/9edd3cc9b334c9cb655a12820fed901648e44d0ec855604a6157eb4a614f/psqlpy-0.11.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce52c351b85c57708959bf3f2fe0ac9d0d2b21b8f46ea9c9d3e94ab61c3da9b7", size = 5034246, upload-time = "2025-08-14T08:42:12.165Z" }, - { url = "https://files.pythonhosted.org/packages/98/41/6185d3a2fa2e5ad086b763ca17934f14e0770a7b9a76b63fdc33d1b5c3fc/psqlpy-0.11.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b5482c529471ffdcdee62af1e06747b8b6845004764bd300ea4496bda99da182", size = 4699816, upload-time = 
"2025-08-14T08:42:14.071Z" }, - { url = "https://files.pythonhosted.org/packages/52/7e/b10a4f5f9c1d4a5c33b78090ca3c033c6d01eb96f7e7a546b0ddd0a6f3c9/psqlpy-0.11.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7e6e115a4e9d14bceddcf350d92c9e67ed4b40ef34eee4fc66be1663faec61e", size = 4928103, upload-time = "2025-08-14T08:42:16.226Z" }, - { url = "https://files.pythonhosted.org/packages/93/65/ae6400bcd2e47fc413bc17c4dcb059ee946013e2ef5a3104295c53ab59ca/psqlpy-0.11.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:826f2c0df29d0d9696513aa2944aa1d7d9b1ee6f9aa6d2bcbbbdd852784f01fa", size = 4955367, upload-time = "2025-08-14T08:42:18.911Z" }, - { url = "https://files.pythonhosted.org/packages/82/0a/0c99b62b7d3a90cd4dfcb5e6459cb1d6fe55392c0d7967784ac8c3295fff/psqlpy-0.11.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3321a971a90df8f5806d7ba5513b87b47ee114fcfbbb98bc1449db0da55d29ab", size = 5067086, upload-time = "2025-08-14T08:42:21.823Z" }, - { url = "https://files.pythonhosted.org/packages/39/75/d088bb1eb29acfac4e3823597adf67dc66282e50e30c4f39bdf4847e459c/psqlpy-0.11.4-cp311-cp311-win32.whl", hash = "sha256:d6225b1447037e60f0eec065d24f7433cb77c2b5cbccf5d6ea8607b4abc6809e", size = 3354073, upload-time = "2025-08-14T08:42:24.433Z" }, - { url = "https://files.pythonhosted.org/packages/99/49/cd0bca36d7b7f17e0253ae73c38c9c4ec68923c030ba6a92f4e11b178323/psqlpy-0.11.4-cp311-cp311-win_amd64.whl", hash = "sha256:5214f251a7d5315ebc01e0c7ca6b9103d3a321e3d89b0e9f520420a4a5c99c85", size = 3759893, upload-time = "2025-08-14T08:42:27.76Z" }, - { url = "https://files.pythonhosted.org/packages/a2/76/a2cb3af82db0f4f05a999f1eb77a7d63ebf51b8c90cebebf045d551f9cd9/psqlpy-0.11.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:4452ec42b07ef63169a4427f53cfe8021c2d2181992969ae627af3af005db3a5", size = 4286702, upload-time = "2025-08-14T08:42:30.065Z" }, - { url = "https://files.pythonhosted.org/packages/b7/32/486da263017e74147bcae96901a7c43dd895b9732ed40a9a276a00c71c56/psqlpy-0.11.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:29aea5bb164302284852b46cc4be8ab4e4df3f5e4cbdb744c2c8811d87a859ea", size = 4489824, upload-time = "2025-08-14T08:42:32.568Z" }, - { url = "https://files.pythonhosted.org/packages/a2/76/be44108b26651eacddb8460bd2cd584b5a5e16834f433b20ec7426421eef/psqlpy-0.11.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71694d170db4cff7efb25e1ad0507ba6ba1dbb131213fa12c476621009aa651e", size = 5029062, upload-time = "2025-08-14T08:42:35.008Z" }, - { url = "https://files.pythonhosted.org/packages/e9/d0/4d019d8213a853247e2d3bb28d6d3e74ac73ffde5db0a346631892b6b9a6/psqlpy-0.11.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f7b6ee237d4387df389245e20e09370bab95f12e5d57325b48bc326fa348f68d", size = 4289114, upload-time = "2025-08-14T08:42:37.354Z" }, - { url = "https://files.pythonhosted.org/packages/9a/0f/38ba3fdb03bd4114038f6238bf85eab7882a0d53ae576624c8f7589b4357/psqlpy-0.11.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27798b066e7413ecc3ae7a0ded458f6267b4a169ddf9dc077f6c7b105d710d9f", size = 4899028, upload-time = "2025-08-14T08:42:39.473Z" }, - { url = "https://files.pythonhosted.org/packages/82/d6/ad5f51e04200fa3e8b1b5b26af3eb3abbe3039400d8d784d9674a90d9ee0/psqlpy-0.11.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f97919461d1d537c7eafe32d6611b496447bc2f484e94c92b7614d6e1e836ce4", size = 5019343, upload-time = "2025-08-14T08:42:41.881Z" }, - { 
url = "https://files.pythonhosted.org/packages/9d/19/2c06d9133979fbcb68ff8ea9e3489f11d97fc402524f7399df6bee0324d8/psqlpy-0.11.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c369d8a52335acefffd617ffe0e2f77ee6586da4fdfc1f8cb939a5dc7d302fa", size = 4677079, upload-time = "2025-08-14T08:42:43.974Z" }, - { url = "https://files.pythonhosted.org/packages/90/04/c36c34ff5138f8b767b20305e44319fb8d97bac40c263b0f206101a4466c/psqlpy-0.11.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44b746f44e15ae06ffbac957a4975491bd2024c2ff9d2b743b911e4c92cc2b06", size = 4925320, upload-time = "2025-08-14T08:42:46.528Z" }, - { url = "https://files.pythonhosted.org/packages/6d/08/33d9d032d2a4ce90244ba2c4152b40688f2bb8f6637b7f88f0c36037813b/psqlpy-0.11.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:13e6eeeb8002baf34a0f49f638d73a3f68bb6470865f9ca9a115272f962e12b6", size = 4930091, upload-time = "2025-08-14T08:42:48.836Z" }, - { url = "https://files.pythonhosted.org/packages/50/eb/53bec2233414f5d50b7c3f8a73ed45ed35995f34166336f374726ac707c9/psqlpy-0.11.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:444ddf597a4edafe90e4a568c031d4698fb221e3b58de7307a5608c524091781", size = 5063064, upload-time = "2025-08-14T08:42:51.409Z" }, - { url = "https://files.pythonhosted.org/packages/88/21/34d4095c059f64e387263869070308fa136eaf63b2778e5825c733534998/psqlpy-0.11.4-cp312-cp312-win32.whl", hash = "sha256:1660247e393abb61b81214af7a9bf4787e2ae9de8fec7645ce9df4cf1268086d", size = 3356744, upload-time = "2025-08-14T08:42:53.209Z" }, - { url = "https://files.pythonhosted.org/packages/20/b8/b16150d738bbc83043c5a0f759ef6c73efa50ec439e94c995ef5733d8dc7/psqlpy-0.11.4-cp312-cp312-win_amd64.whl", hash = "sha256:5bb66a7978fc0f0c02d167c8a084e8c7a43c7628dcf69416940b63ba8aeac9ec", size = 3765608, upload-time = "2025-08-14T08:42:56.111Z" }, - { url = "https://files.pythonhosted.org/packages/f1/0a/a70017f622c50d51f7fe202cea35698d3d007e53d2a31d712a2e643df657/psqlpy-0.11.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:55e1ca321461f1675f9041bf4c3257051ea324320313435b06493cf55f32c155", size = 4284840, upload-time = "2025-08-14T08:42:58.417Z" }, - { url = "https://files.pythonhosted.org/packages/74/77/8cb0fb9c4095c4afdc19b939d3cfce17c7f145f184c723f74076e61154b9/psqlpy-0.11.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5081d67c1115ac302dc31dd5d62b2cb5d8365abc324cbe3f18110b8147450e9f", size = 4490518, upload-time = "2025-08-14T08:43:00.807Z" }, - { url = "https://files.pythonhosted.org/packages/91/3c/8f3325ee8865af4be496caa23b4a5f9a08e4d06abe634de781b720b103dc/psqlpy-0.11.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29099623fe339d3d396be05d73f7b9144f850550b87aeed47e51130e5329f205", size = 5031400, upload-time = "2025-08-14T08:43:03.699Z" }, - { url = "https://files.pythonhosted.org/packages/e5/73/4f7f40548e194c7d7a615e894162e2731176c9018a9d888dde1a46579365/psqlpy-0.11.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e95c38b2617825265c41e2ecaedee5beaf531d2f42a7cf368b18270c6dcdd24d", size = 4289769, upload-time = "2025-08-14T08:43:06.127Z" }, - { url = "https://files.pythonhosted.org/packages/3f/3e/c4ee89d3a18e2bdddb77e6f48c586424d16720f03fd125d655370ed1f5a2/psqlpy-0.11.4-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd715d8185eccd2c77216bfcdc962e5ed5b146c9b78b5df92222ab902781519f", size = 4911483, upload-time = "2025-08-14T08:43:08.235Z" }, - { url = 
"https://files.pythonhosted.org/packages/89/c7/db79fe484299c277e05d2434b0e7f6f1d80ff11c0e5f53fd607e1448e588/psqlpy-0.11.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08a331a6634edfc6285e328011d177bc190ff62167ef98403ba7096dded72d46", size = 5018283, upload-time = "2025-08-14T08:43:10.392Z" }, - { url = "https://files.pythonhosted.org/packages/b1/0c/e50bc06d73af42b84bdc79b9a055bf855e38518fdeabaa32d19a28792c0d/psqlpy-0.11.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:53032983a1ee48e9af6cfa2ca40b6b6ea6da863c0b207b991cc5c0eda2fea79b", size = 4694187, upload-time = "2025-08-14T08:43:12.84Z" }, - { url = "https://files.pythonhosted.org/packages/59/05/20b35f56bdd9b2e8c1b5837b45067b9b7bc99f4af31914e083ef0e1ec18e/psqlpy-0.11.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a47e17b6575387c8f2cb5d3b9b90089914a0d62bc837ca38a4d6abcfb2cf61a8", size = 4922955, upload-time = "2025-08-14T08:43:14.832Z" }, - { url = "https://files.pythonhosted.org/packages/8b/de/220695bf6c801a7ec6ce9a828cc67e5a5576323fe0b9b8959825c05db892/psqlpy-0.11.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c84cd445581499d7982d1162b82ca39a59ede2556b15490bf65466aa88a248c0", size = 4942011, upload-time = "2025-08-14T08:43:17.023Z" }, - { url = "https://files.pythonhosted.org/packages/bb/a2/cfb3b44d1cb8dbc2af43b4aa2606cf288de048ca2c6f5321b23323f64cd5/psqlpy-0.11.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94a12830982725686f8040d3b216f0743ad3909153e5bdcf9c497dec254998e9", size = 5060338, upload-time = "2025-08-14T08:43:19.505Z" }, - { url = "https://files.pythonhosted.org/packages/71/07/e98b021c71337fa17044f7190427a3e9aa53c8bdaa19dc92d4b505d5f5f5/psqlpy-0.11.4-cp313-cp313-win32.whl", hash = "sha256:546f4589014813ed06ada386052b50f045958d71289375230678755c1ce20199", size = 3357017, upload-time = "2025-08-14T08:43:21.377Z" }, - { url = "https://files.pythonhosted.org/packages/38/c8/5abf0e2cf58bc57e8ea313fcfb170c77bd768e1b7abe161fd637ba0f7e7e/psqlpy-0.11.4-cp313-cp313-win_amd64.whl", hash = "sha256:0134d01637aeceb496723e32634faef00a0320f520f1e404a0ff04875eedb3d8", size = 3765210, upload-time = "2025-08-14T08:43:24.24Z" }, - { url = "https://files.pythonhosted.org/packages/36/19/91e1a52be9668ab1c7196c682166e492d056d9b0a209f513c2c4e962cb6f/psqlpy-0.11.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:7d73c7e8c71caaf7e595f077c921ea0ea1226c9907dc48c7b339c38cfc519cfa", size = 4309965, upload-time = "2025-08-14T08:43:26.783Z" }, - { url = "https://files.pythonhosted.org/packages/c2/55/87c74143d8d4548fd699c60a51b1aebb6dbb18d7e10034ea123bd32e8a4a/psqlpy-0.11.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:504237a577595c04712a07f84fefa008b31f966a6a80c75467a3ccf5524f6868", size = 4513889, upload-time = "2025-08-14T08:43:29.728Z" }, - { url = "https://files.pythonhosted.org/packages/00/de/7c77bd145fe88e83a555bd0c319ce8c3a16a0594b7210b654a8753a09c3e/psqlpy-0.11.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2c7a35776b5cbc0e65ef12b9ff44c63250a36c5d81d1a36db6daa32acd7258d", size = 5044923, upload-time = "2025-08-14T08:43:31.678Z" }, - { url = "https://files.pythonhosted.org/packages/62/0f/b600e06f57b72d5eb217cdf3720f8978e1d18bfa9593ecdb2ac89d4310eb/psqlpy-0.11.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:81017357d30df8baa64f90899683cf92ba84891d3f53b85bbbf728af3fd64745", size = 4299450, upload-time = "2025-08-14T08:43:33.985Z" }, - { url = 
"https://files.pythonhosted.org/packages/03/e3/e03c3b97932752b037146099ed5a03064f433aa4cd9875934c110b6285ff/psqlpy-0.11.4-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c83f7d6a2e8c7d8e3a0499e970686dfe93ce5e34f83db05ef7da0a17a030aeb4", size = 4928217, upload-time = "2025-08-14T08:43:36.888Z" }, - { url = "https://files.pythonhosted.org/packages/d1/a0/dcd2191d5e43a8761f9933229567de1cca8be6daea35b0305ee9757038eb/psqlpy-0.11.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7be5e090837c55ed07963abbdd039a2b2b02fadea9c9ba6ea7894c25dcaf17bf", size = 5037082, upload-time = "2025-08-14T08:43:39.355Z" }, - { url = "https://files.pythonhosted.org/packages/6e/1c/7c794ca5e4ce3e6f5920b9cf9a770016cc348ee3955d0d7daaf2abe3079c/psqlpy-0.11.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3bdf8e40c02b302d149be0ad8d469154659b70c9a9fccd8f98bb4913ba9d981d", size = 4698835, upload-time = "2025-08-14T08:43:41.36Z" }, - { url = "https://files.pythonhosted.org/packages/6f/b2/0894ca29d94ca3d76a488d47d2cd18b47f4de65482651d932d81056db59f/psqlpy-0.11.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c425ba3185616770edf6cc4a3200e9f25b992d22fd594919cb3c7efc0249631b", size = 4929131, upload-time = "2025-08-14T08:43:43.465Z" }, - { url = "https://files.pythonhosted.org/packages/b6/0f/f88c6f510a2220788ea7e1a06b0f8d562706d8e388f36a5aebb5a404d765/psqlpy-0.11.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:9bbe5c697b5a494f47ee3aed586ba5d15954eae9edbbb5e2911e056332ab38f0", size = 4955968, upload-time = "2025-08-14T08:43:45.825Z" }, - { url = "https://files.pythonhosted.org/packages/52/04/2c026361ec7f13f2a90bb1a48e54d5132a73f3b6643d76e38c5b434644bb/psqlpy-0.11.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5f706d6f2bc4a08e0cb94c221dda7234901859cb4537d59764ebfe795027de64", size = 5067871, upload-time = "2025-08-14T08:43:47.847Z" }, - { url = "https://files.pythonhosted.org/packages/80/58/bf8c49750f2d0a5bfd1fe5b37aea35b03eef3173a9e5abc7ea8c688ca56f/psqlpy-0.11.4-cp39-cp39-win32.whl", hash = "sha256:6f6e803191c582ae85c3e2a8a5f29e2ceb59e81bf4bc9d3baeccebfcd73eaecb", size = 3356927, upload-time = "2025-08-14T08:43:50.086Z" }, - { url = "https://files.pythonhosted.org/packages/a1/17/b47a665a83181ac5213117f6e69e74da12dad0614eae3d2e3d6024048f87/psqlpy-0.11.4-cp39-cp39-win_amd64.whl", hash = "sha256:b157ff7d73fa6f19f617412391291e26d2bc75dfd5d7a27826e57145d04f9415", size = 3761352, upload-time = "2025-08-14T08:43:51.981Z" }, - { url = "https://files.pythonhosted.org/packages/72/09/f168b54afcdf8c36115c3125cc60a45694d287c6c7727cc24f85767f1bd3/psqlpy-0.11.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e4e1ab9a7c98769d09a9557af59e8cfd2b5c307d36133a7f2013d558151e8d37", size = 4305382, upload-time = "2025-08-14T08:43:54.636Z" }, - { url = "https://files.pythonhosted.org/packages/11/d4/6b969e6147c9b4493e9fab52303606493a33a6eea0fda1251fab9d54811c/psqlpy-0.11.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:c3f813f7f7b7095a1f56ba351013ddd85a03a77c3ae38170d183f64742e63fd5", size = 4505679, upload-time = "2025-08-14T08:43:56.607Z" }, - { url = "https://files.pythonhosted.org/packages/5e/90/44950361fc484e01fca7e5044940bf80d6993c89b8dee401be59a20d25d7/psqlpy-0.11.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ade55257c70b1929be3e1d731b1389466ea2a02f74bed1cf40417a4f6fbb68c", size = 5042759, upload-time = "2025-08-14T08:43:59.038Z" }, - { url = 
"https://files.pythonhosted.org/packages/7e/4f/20a929782106276e6720e6944001a928ed771ae2d92b9bf81ff1c86eccb3/psqlpy-0.11.4-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3b0aaae7426339e183e268a60fc40fe2e3a1a7c620518b28c2d83348ca5f4b54", size = 4295894, upload-time = "2025-08-14T08:44:00.97Z" }, - { url = "https://files.pythonhosted.org/packages/f3/ca/bab6c1f9a67e32d5393ec84a48487631fce53c42d6ff35b6631937642d43/psqlpy-0.11.4-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:804e5a124d0a2c0a777649d746b36e5cee1e807f6ae742ea320d51cd117f5bc2", size = 4920311, upload-time = "2025-08-14T09:11:29.42Z" }, - { url = "https://files.pythonhosted.org/packages/c5/ae/ebb97a4a9db56bb9a09b41ac5c17ff484a557bb363051d8613bba32557ef/psqlpy-0.11.4-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afba6e6206d8ba47ecf7837f3552685e2cff692ba9c65febeacf59a6ae07019b", size = 5028796, upload-time = "2025-08-14T09:11:30.881Z" }, - { url = "https://files.pythonhosted.org/packages/1a/51/4ed6be9e2350dd7c0814770996959ed4754163f2ea63b8b2fb92ccdb1954/psqlpy-0.11.4-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f72ef1cd0f6270da3f597638ab1e1b340bc75a2b9e33b85d26547d72057fe23f", size = 4681905, upload-time = "2025-08-14T09:11:32.479Z" }, - { url = "https://files.pythonhosted.org/packages/01/1a/0004d2ae122d83653b1fafc542a7ca3b6cadf9631bc3d526bb9473945279/psqlpy-0.11.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4bbb3f610c917114641ad0132ba07015b4c882d00459ed19979ac45a00428b32", size = 4933794, upload-time = "2025-08-14T09:11:34.233Z" }, - { url = "https://files.pythonhosted.org/packages/3d/9f/0a984d4dad209ccb68a112033229498532ba6d0bd2b1f0fedddadac10c4a/psqlpy-0.11.4-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:c3653d4e7363d50e5f9d5ac76a15fc462b5111a80111354141d784930b20dacf", size = 4951140, upload-time = "2025-08-14T09:11:35.721Z" }, - { url = "https://files.pythonhosted.org/packages/89/e1/707f136f1bf9a1455d459706bf9b19a96946918e05aebc630ccdd99307ad/psqlpy-0.11.4-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:46384aa8d9a1e300a730436c6ca3551a0eb59863a26ca5c7475337f8e0481eac", size = 5071764, upload-time = "2025-08-14T09:11:37.343Z" }, - { url = "https://files.pythonhosted.org/packages/27/a3/cc729b211dc3c2d872510ce8a6759fafafe5fd14f83038f4f41e97754e8a/psqlpy-0.11.4-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:43de0d4c167bca48831653e8ccb2631d4af0d6e6210ab0649fc5126bf5376e33", size = 4303103, upload-time = "2025-08-14T09:11:38.762Z" }, - { url = "https://files.pythonhosted.org/packages/b3/75/abeae3ac76545cdf4b8054b042b325ea7e2e4aab62c4d3821d60f3d4e99e/psqlpy-0.11.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9fe3ab8b0dc4d74eea5579e43fbc2500e5dff1ea00653cc46f8a51dcca8c57b1", size = 4506333, upload-time = "2025-08-14T09:11:40.477Z" }, - { url = "https://files.pythonhosted.org/packages/b6/2a/38ed70fc148e0ec980041f341e79a06ccf989ad2d9fb76ea53d150c343a2/psqlpy-0.11.4-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7264b3e9d3bed0e7bc19557d9ffb9275c0701d91c9375807b81db562a47b1202", size = 5043283, upload-time = "2025-08-14T09:11:42.142Z" }, - { url = "https://files.pythonhosted.org/packages/de/09/819c40619222432aa2871b554fe5564cba00e4fdbf22914d6ed55280a13b/psqlpy-0.11.4-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:8612d436dd6ee0b2ecf622362870f271217f241b89de92434dc4fcf59ca3dd04", size = 4296471, upload-time = "2025-08-14T09:11:43.556Z" }, - { url = "https://files.pythonhosted.org/packages/14/52/0063fab3f4ff180a70c1c892db08f4d2df81c4f4a6cfdefd9a146e767ed3/psqlpy-0.11.4-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:725bfc04afc0cf48f408483e6d03134acdc3b3d53c847dbccf486262a287d5c1", size = 4923692, upload-time = "2025-08-14T09:11:44.99Z" }, - { url = "https://files.pythonhosted.org/packages/db/9a/fdd745dc0f9aaa80d2454cd0caac2a2e70d656b2f708a948aa2b65b7d788/psqlpy-0.11.4-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5877bcd74e9cc129080b9e721ccaae84a49b88dbadabcca13b11193f6268880e", size = 5029846, upload-time = "2025-08-14T09:11:47.084Z" }, - { url = "https://files.pythonhosted.org/packages/b3/6f/099489ab96429856f1ac05d18efd6ef70460f3171bac24cf3f24cef59672/psqlpy-0.11.4-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:be6ecb02e588bed4510e61a1c1be5b68597d04e55de03a462935347fbb14cba4", size = 4699035, upload-time = "2025-08-14T09:11:48.758Z" }, - { url = "https://files.pythonhosted.org/packages/2b/56/52e581005e16ae40f924695148ebe1d42583c298eb23272bead50fd11422/psqlpy-0.11.4-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:faa704f395506eb8ca68d652a893ef6fce258d016e9fb1ab62602e15a67d016a", size = 4933932, upload-time = "2025-08-14T09:11:50.189Z" }, - { url = "https://files.pythonhosted.org/packages/d7/c0/d969ad7a5d4fb36b44b1a2fd83ec091abb880d05ea2fce86d903f3c9a9b1/psqlpy-0.11.4-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:3c00db96ac60199d9756e4fa67ac9051ab62b50852ea74d0a8f8acb268f3011f", size = 4952333, upload-time = "2025-08-14T09:11:51.613Z" }, - { url = "https://files.pythonhosted.org/packages/75/78/acfc4fa9f3ceda9a7bdfb21063423c302cc0f3369e6e6bba79fd6c57b440/psqlpy-0.11.4-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:d9991a76daee1d153124b93ec2e4a65fba71a5273ec0f6738b0e8b84d2945630", size = 5072043, upload-time = "2025-08-14T09:11:53.1Z" }, - { url = "https://files.pythonhosted.org/packages/e6/32/732d475f5e1f253ae2f1b10ad0c50f98c248be6bd3ad373606f3ea771cfc/psqlpy-0.11.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:07051ac8f2705e90f700273371a6b5f61812676ed2da3c3a049a64870a486b8d", size = 4305282, upload-time = "2025-08-14T09:11:54.579Z" }, - { url = "https://files.pythonhosted.org/packages/98/c9/c9188c11543a76bff09c4317e7972a3a02d43b02248426ce35a92ed707ed/psqlpy-0.11.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ca237aa2eb130e460ebf8c32c9925fc6b5d7f63c915981048acebb51a51ad7a8", size = 4505381, upload-time = "2025-08-14T09:11:56.074Z" }, - { url = "https://files.pythonhosted.org/packages/a8/b8/9bbbc5dafc240bc59a37b0fa549d59c455ed45d42c47f8be0206ff3f2fef/psqlpy-0.11.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:783f81c466240c28f78e3dd8a0f95763f3f7b2d99086b82158078712d4736d12", size = 5044313, upload-time = "2025-08-14T09:11:57.74Z" }, - { url = "https://files.pythonhosted.org/packages/d0/be/adb1ca30d47b84d1472e08abfe6dcc1e57392167e749c95334af94027bf9/psqlpy-0.11.4-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7824fbff64c9b6adc9ca5605b45cfc50c806e40f84e536df7651f7bc44463a7a", size = 4297820, upload-time = "2025-08-14T09:11:59.369Z" }, - { url = 
"https://files.pythonhosted.org/packages/4f/df/5283839d7f5602825efbf1dd76f7a72a6ab4701bbb0f524674dacd0b39b9/psqlpy-0.11.4-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e78b2e41fc4bbf65372bb889c13f7dd61a4199ae5e52f36834fec4bcf3bc85f4", size = 4920650, upload-time = "2025-08-14T09:12:00.912Z" }, - { url = "https://files.pythonhosted.org/packages/bd/b6/979f6de4893d1899a24ed59a84a8c57adadaafa7df4c5414d22edaf987b1/psqlpy-0.11.4-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6ae342ff1637ead3f263419211b93ff7423864c2d1f4340ee70d0c7747ef125d", size = 5030537, upload-time = "2025-08-14T09:12:02.383Z" }, - { url = "https://files.pythonhosted.org/packages/3f/d7/8adfdb5670c4bb777498ed6234660b24b133cf4847c39dff4b1c3b12b414/psqlpy-0.11.4-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c54478ae6ccc8763bed1e573f3decc7125292a3a3e6f6e68575bdaca42dffb71", size = 4682027, upload-time = "2025-08-14T09:12:03.952Z" }, - { url = "https://files.pythonhosted.org/packages/3f/44/587471fc8c00d4917e9f150edb7283e6f850d75548f6c926ddbe03c4faba/psqlpy-0.11.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e53380a3344c794daa5040f173d389849662eaeb6a600eea1ad34232319334c", size = 4933704, upload-time = "2025-08-14T09:12:05.456Z" }, - { url = "https://files.pythonhosted.org/packages/bb/e0/ca5ef98995053eee7df563dd7389912106b72ca88c04ff68d7a55df69bc2/psqlpy-0.11.4-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:b570403b143689755165d0d908c40259fe66d4cec15f5aba06b00fda95dc994a", size = 4952292, upload-time = "2025-08-14T09:12:07.197Z" }, - { url = "https://files.pythonhosted.org/packages/cd/4f/f7b979c6012cbfe666d24090a4f06d2250578402fed38453bc4d22a972c8/psqlpy-0.11.4-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:12352c214f44fddb69ce1f4f299027e25cc56c322fd63a5d6377bf76dd49ef34", size = 5071934, upload-time = "2025-08-14T09:12:09.057Z" }, +version = "0.11.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/83/9e/95659a30735fbaa37f30694a0ada3afc30ee1c12f30eca73cea233cc090c/psqlpy-0.11.6.tar.gz", hash = "sha256:7dd11253fb17059db9ddf41c0b96497ba107855905fbcdfee7f7ce3ea1745adb", size = 290188, upload-time = "2025-08-14T17:23:11.46Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/aa/7a6d62eb8f0a09f6ba9998be29874ee31a352182304d0a04bcd910835b45/psqlpy-0.11.6-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:952e2e4e0bdaa29eaf5967a82f2368f03639f2dfbe9d3c7e0b97b55830a79b27", size = 4310132, upload-time = "2025-08-14T17:20:46.739Z" }, + { url = "https://files.pythonhosted.org/packages/5b/9e/46bc085089f6671f4a5c9888e23113f556edf08e968b1870c5a28b9b22e7/psqlpy-0.11.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2955068c9bd5873ef77a5b86bcbe4825ce859a55c2c06ab47743d1c075f21e6c", size = 4516406, upload-time = "2025-08-14T17:20:49.488Z" }, + { url = "https://files.pythonhosted.org/packages/7f/c3/ae318e617f90a98de447292431e2ebe88130238656b7f52789363342f2a9/psqlpy-0.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec7677fc00eee7df29c5727eca54ea496686980ba00dd48311f231b06e01e12e", size = 5042443, upload-time = "2025-08-14T17:20:50.754Z" }, + { url = "https://files.pythonhosted.org/packages/9b/77/1599e3724033ac886db35399023e98d9050bebf7d709a7f76b49fb611aa4/psqlpy-0.11.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:91dbf904c693d6432ff3a221423d62968a212151b1e945122e74dddce813e150", size = 4298972, upload-time = "2025-08-14T17:20:52.187Z" }, + { url = "https://files.pythonhosted.org/packages/f9/3b/22a57f247755d44388a046530902403d1230c859cc23aad85d242a90f319/psqlpy-0.11.6-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a8f7813be18eda28c313c7fbe94ea2f9a3ca8d376d1a7b04ae4f5a74beb4009", size = 4925185, upload-time = "2025-08-14T17:20:54.024Z" }, + { url = "https://files.pythonhosted.org/packages/b0/78/e386bf4e69d9d594b28e6f22fe000c00eb5a107eeb09e0cdd94c2baa6862/psqlpy-0.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8411f44e2235bd3f71ed9cc6cca1b889be67e9edae2f8a33698aaaab9ea7a030", size = 5040564, upload-time = "2025-08-14T17:20:55.453Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ef/2d5ca961828c66a2d13638405c6732d89ee7fae88e7b797c7cf217b0723b/psqlpy-0.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d760b4e0f41e9823a686fed49271abe8be8350be2d8f0c9f75c5a1c180f6c72a", size = 4700508, upload-time = "2025-08-14T17:20:56.812Z" }, + { url = "https://files.pythonhosted.org/packages/95/56/e42506053900a4b8d1a327ceebab47320a537526b82812ba1731417a1e3e/psqlpy-0.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fa27eaf360da589fc5212bf58b0edc632f894f8c70ab16c790b55e3bbeedb8e", size = 4928317, upload-time = "2025-08-14T17:20:58.18Z" }, + { url = "https://files.pythonhosted.org/packages/fd/5d/c09b2ac6adfd1c8ef78b9cbc5f2291dbe898283fcd10c63be231ff0ddfd6/psqlpy-0.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2f6cc54f33d377dd8db562b4b6691f2af9c9bedf367bc5d7267547bf74d1ccce", size = 4957986, upload-time = "2025-08-14T17:20:59.538Z" }, + { url = "https://files.pythonhosted.org/packages/9e/57/4ad36f2260bff41e1de26a7877ec5b5c503a846b9c97a8f7669e33643269/psqlpy-0.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:38dd049421c6c7b8d6ee4a88d6404f2af2f4a5cf7722e70201bb1709ba371fb5", size = 5068208, upload-time = "2025-08-14T17:21:00.964Z" }, + { url = "https://files.pythonhosted.org/packages/67/b7/1d9aec7918ed3eab855bca1f3acf42e92e2ddf86fddc3a04de0e76292e10/psqlpy-0.11.6-cp310-cp310-win32.whl", hash = "sha256:f55f816f177db68ab216c8b49432dbf6144db1a3e2867f9f4d2feae0160f7f53", size = 3351818, upload-time = "2025-08-14T17:21:02.815Z" }, + { url = "https://files.pythonhosted.org/packages/80/4d/2b8da947e878c3ade44ba083fa60d57e1b20e5d30dc5879a4c19bb7de3ea/psqlpy-0.11.6-cp310-cp310-win_amd64.whl", hash = "sha256:efc3a488d1cd63813be4f1863de1da9b3ae8d82e270113f365cfcbc2cf961083", size = 3761093, upload-time = "2025-08-14T17:21:04.648Z" }, + { url = "https://files.pythonhosted.org/packages/84/33/add408874f088dbe585d22653dd05dafb50776038d6282c54821877d61e0/psqlpy-0.11.6-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:205936327b0830298864c46ae2a43ff93a66985c7ee4e66cba7991592393c5c7", size = 4309332, upload-time = "2025-08-14T17:21:06.071Z" }, + { url = "https://files.pythonhosted.org/packages/57/b4/971c5464dec16afea6344d5808e187f5e8e8b3daa3d18878ba9fce9c89c0/psqlpy-0.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0ef7e8af9dc40cfd24df7d680e4c9277d6c994acdd58da1dabe823247dbffad9", size = 4515551, upload-time = "2025-08-14T17:21:07.391Z" }, + { url = "https://files.pythonhosted.org/packages/a3/22/aaef103bd5b8c04650c83258b4b241f967afaf76b32d11c92366234e6e1e/psqlpy-0.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:55d8ff15e7781e0d7e70f22d8643eb952010623ba34ae8c4868cf2ec58ff24eb", size = 5039985, upload-time = "2025-08-14T17:21:08.765Z" }, + { url = "https://files.pythonhosted.org/packages/9e/6f/a11d8dbc48be3a1587c37990972568c9da58503ec782c5ad905df6d54947/psqlpy-0.11.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:11ab85aa5dbf64725f6d4ed809bef814ad9023891381d9775b9aaecf0f9b420e", size = 4300029, upload-time = "2025-08-14T17:21:10.377Z" }, + { url = "https://files.pythonhosted.org/packages/11/22/0461a9372a27e9cd4969431dfa7076b3c9fcad79d5b8ff0ddfc15080939a/psqlpy-0.11.6-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6fbf09fdc049b7dd6f820e61d59ccf325a4f89f2bd133e0f87cb1937b0bba8b", size = 4926113, upload-time = "2025-08-14T17:21:12.112Z" }, + { url = "https://files.pythonhosted.org/packages/8b/e8/fcd0d5eb78d860b705b36e6f569eb47e0b69f2d793983dda46271b2819e3/psqlpy-0.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e09b716eebc184e08125c858dda6bf7c9f3be6bc9f9bd28f95a7cdceac0dff82", size = 5041647, upload-time = "2025-08-14T17:21:14.338Z" }, + { url = "https://files.pythonhosted.org/packages/4a/a9/a9e75a58c3e9f95479ab7ecd7f40cbb4fd1c3f21a52e434418efdb3129a3/psqlpy-0.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1c1d4a21000ee8dcb70ba7a6de5426f49354adcc205786b9a95bdd4e6151ba1", size = 4700315, upload-time = "2025-08-14T17:21:15.861Z" }, + { url = "https://files.pythonhosted.org/packages/1c/2c/d1262e9b2022e51b489e8b5bcc95d136b2247f856f433ac8c70d4fcb0501/psqlpy-0.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d4055b8496b9634540af83772575f3c8c08771279809e9fb0f1c04a8e9b8ece", size = 4928905, upload-time = "2025-08-14T17:21:17.398Z" }, + { url = "https://files.pythonhosted.org/packages/cd/9f/19b96fbbc5ebac42ff87e02fd251db8251b68347acfa57fde146b4bc57aa/psqlpy-0.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:02a5c726d804c14517040ddaee3f698c0b18ddb7aeeef08c490a909ad3a170aa", size = 4958259, upload-time = "2025-08-14T17:21:19.189Z" }, + { url = "https://files.pythonhosted.org/packages/7f/dc/00cc7dd7c067af6da1eb2ede5be062edf3b0e2a289dc5d02555cf4481949/psqlpy-0.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:10521d700d136e08a3817ba78a494171fc6d52749acda7b7e33339120297e8ae", size = 5068671, upload-time = "2025-08-14T17:21:20.935Z" }, + { url = "https://files.pythonhosted.org/packages/4e/b1/40235eac320084198a8a7ef12a9a01794e70921a78537ec30246b58dee47/psqlpy-0.11.6-cp311-cp311-win32.whl", hash = "sha256:1e06d5d7c437246568142caf675ec33c9f29be2683dc1793d3030e61bced8e76", size = 3351919, upload-time = "2025-08-14T17:21:22.355Z" }, + { url = "https://files.pythonhosted.org/packages/05/82/d5513aedb7ea0137ff9a4fc9c1f001804e4d7dcf2a975ad8ed97768459b4/psqlpy-0.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:3c20cae9fda38654931682c688f65610d93f7539de46f5b7353da8633815d166", size = 3760582, upload-time = "2025-08-14T17:21:24.007Z" }, + { url = "https://files.pythonhosted.org/packages/79/fa/a0f9c4fa00faf4d480c402502db76f83dcdb759b45d73d274d3085872e08/psqlpy-0.11.6-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:fbfda19815ae02757b69639b21e0492dbe28d832fdc0cd083b599745c72f3287", size = 4286126, upload-time = "2025-08-14T17:21:25.615Z" }, + { url = "https://files.pythonhosted.org/packages/80/da/4ad1d15e948804fc691abd6e25c3b1f97aa0d005c3e78c359f88111c3be9/psqlpy-0.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:b6ff1d79ec1e1028ac30cb08381821a4a78ecd823dd2be6cf779dde008da9dde", size = 4493871, upload-time = "2025-08-14T17:21:27.032Z" }, + { url = "https://files.pythonhosted.org/packages/74/52/4cb68092a3df6d89869657dc0b44172f97067789ad1960ec8196cf29d91c/psqlpy-0.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b07b5a46a7a8b08e9d9cd7c7765a2d2b875af2b6aa26fe30433b42de56f8bf3", size = 5033454, upload-time = "2025-08-14T17:21:28.67Z" }, + { url = "https://files.pythonhosted.org/packages/08/fa/3b253694faf2369295edf2a52bb3149ca7ba13d37a90c12cfdf454a1cbba/psqlpy-0.11.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:170059e1ad89a66c3e7106cae1eaf5bcb6e5a13fa6565c59e1858e862c50f7ec", size = 4286197, upload-time = "2025-08-14T17:21:30.407Z" }, + { url = "https://files.pythonhosted.org/packages/24/30/11867f7182bf13e8768415198d064a1a4f2082fb77c66e404dd12533a51b/psqlpy-0.11.6-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:85986d0b516457c088b22dfbcbe6ef060d9ee9d6a120c38f29d4b1104826a8f0", size = 4899752, upload-time = "2025-08-14T17:21:31.914Z" }, + { url = "https://files.pythonhosted.org/packages/07/4b/ff8babcf13db850ecbffce93fe86f50712c35f2949c8e2ce10497eea68f4/psqlpy-0.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08dc8ccf395e0abeb1425fdda8d3d9b22840ff27358bdcbbe01a0ee1b780b549", size = 5018867, upload-time = "2025-08-14T17:21:33.368Z" }, + { url = "https://files.pythonhosted.org/packages/51/05/5ac0d6564e55a920620a9f82c6e13c299254d933000b96a7056b941e652f/psqlpy-0.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7fa85619260d86937f95ad6c089968b8f03af3b62623982df468b72fde667ff5", size = 4677657, upload-time = "2025-08-14T17:21:34.998Z" }, + { url = "https://files.pythonhosted.org/packages/27/68/f4cabf3a24f0ac34b82e3f3da2415040b58ec8f0371c849bb72707927525/psqlpy-0.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:461f5bd3a226096ff7fb2e8dc702e48b231d7896151621c1f9f0f1cd9475d8e7", size = 4926305, upload-time = "2025-08-14T17:21:36.55Z" }, + { url = "https://files.pythonhosted.org/packages/ea/90/0cbb0ce1e8083aea902586a65093a9041c182dd28c7a3b4c3028fa8b6588/psqlpy-0.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3199ff6bf5a92e98546288ed23a80c586e663583cc8e15ffdc24ed1cad4b2251", size = 4938958, upload-time = "2025-08-14T17:21:38.171Z" }, + { url = "https://files.pythonhosted.org/packages/a0/c9/2abadb734cf55a1c073c4dc08cf78ce75a9d438c8bfeabb6bb42cdb4ff15/psqlpy-0.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e15da8ee6c853397a9c9dc555c3b57f848a7e7a25a0595fdbaa3c19a390160cf", size = 5065920, upload-time = "2025-08-14T17:21:40.117Z" }, + { url = "https://files.pythonhosted.org/packages/09/d6/62d0213295a63c1a4d57e8b22cfe82ff107ab5c59cc1db17de256b163d74/psqlpy-0.11.6-cp312-cp312-win32.whl", hash = "sha256:0e5b482677e21d7f03455105ac4b2ffc11411d3e7409da8870a866b3558c902b", size = 3351654, upload-time = "2025-08-14T17:21:41.766Z" }, + { url = "https://files.pythonhosted.org/packages/72/05/7c2dfc0435bc99afed67ef33b0cab6d9c4a444ae53110abb2d686534f205/psqlpy-0.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:6ef2ab055a11f920c8a021561acc9c1a58d06c215bb8ac992ddf7f79e44f7c89", size = 3766514, upload-time = "2025-08-14T17:21:43.213Z" }, + { url = "https://files.pythonhosted.org/packages/cf/6a/69a76e2f47988109389af3df81700619f6fa2566e368d5e79951e90fa562/psqlpy-0.11.6-cp313-cp313-macosx_10_12_x86_64.whl", hash = 
"sha256:1508a64de1d32f24b7518521fbcfa6e2b775edd615c26491c50acefe617bef36", size = 4284249, upload-time = "2025-08-14T17:21:44.558Z" }, + { url = "https://files.pythonhosted.org/packages/de/78/6d0de2107b8bc9c91ac9db505a831091ac6fda9ef34490f699263e29a009/psqlpy-0.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4e35a360e6f900c2cb7f067a4ab94f7ee0c84eb240ade31f8e743a923b14f184", size = 4494620, upload-time = "2025-08-14T17:21:45.949Z" }, + { url = "https://files.pythonhosted.org/packages/c1/75/1b9d1fe6c3334a2b14a754531fa919f3662c4a3cf324bca32191effa5fc3/psqlpy-0.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6ddc99e9750a4227ac84caef469d7a5b991df32aca2f8e3a0b01ce870c8c93e", size = 5028860, upload-time = "2025-08-14T17:21:47.478Z" }, + { url = "https://files.pythonhosted.org/packages/9e/af/b946866dbf590411e6558e5d40a7f7a2b5ebc830c692c038c48e6d8789c3/psqlpy-0.11.6-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a0b9905602f41770bd43d85be4d9159db719c2c82127c396625e3a7e15f2dd39", size = 4288707, upload-time = "2025-08-14T17:21:49.413Z" }, + { url = "https://files.pythonhosted.org/packages/c6/81/2c18b3bf2836b5311fc8f451211fe8a3e9fd7e39923fd2bd1afa121dd123/psqlpy-0.11.6-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af8b5e3f013235355e022a7f07179948dbe9645ae990e13cfa01c4bfcba73764", size = 4915394, upload-time = "2025-08-14T17:21:51.311Z" }, + { url = "https://files.pythonhosted.org/packages/df/c7/a6df171d6e12b22d266801543c9b61922744146f827dfc92eea2233b6bdd/psqlpy-0.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0dceb698fb324729a31b3668eeb65bf107af78bdc14f3d10bf27658242fc3e46", size = 5016572, upload-time = "2025-08-14T17:21:52.848Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f4/09e63cd42e5ab6337136db09e9c5d56d1f509a41826fa76c94b5b738b382/psqlpy-0.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:541c8025f94226d2521c4e8de1cbc22a5b180f0bbcd75925fbfc04cd849483d8", size = 4694538, upload-time = "2025-08-14T17:21:54.215Z" }, + { url = "https://files.pythonhosted.org/packages/d6/77/6aaec569b7443321dde4a2c67ca02cc4399b637bf85d22cd835a5a92af40/psqlpy-0.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7e34ddba63cfe66aba152df22c9c6b505974069e6ad53b54389bc46b0fc8e6c", size = 4923742, upload-time = "2025-08-14T17:21:56.409Z" }, + { url = "https://files.pythonhosted.org/packages/a4/fd/03b19e43f7a31ddea1a55bed49c3b73a104aa778f5d0c32f378c8d12c9d2/psqlpy-0.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f5c30dabc70b93627d6897054ab2c6c74c65f00ca11f3bc755044f753868c0e4", size = 4947137, upload-time = "2025-08-14T17:21:57.867Z" }, + { url = "https://files.pythonhosted.org/packages/50/bf/c7b5082c3709c3fa2c08d09971b012979a9d879f0a5ed22c7995c94ef95a/psqlpy-0.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b633ae41fd5d1295655a338c595af3f020722af312c3b541b53d1caa6882afe8", size = 5063961, upload-time = "2025-08-14T17:22:00.005Z" }, + { url = "https://files.pythonhosted.org/packages/74/6b/48089ae018f9d2fef0d8859248ef33c6b0d04299f73a554ef3a7680a2673/psqlpy-0.11.6-cp313-cp313-win32.whl", hash = "sha256:6aca8c34509c25e49c651ec59b7e164ea45a63111c0c11bdadc222c8ca714eed", size = 3351887, upload-time = "2025-08-14T17:22:01.422Z" }, + { url = "https://files.pythonhosted.org/packages/f1/74/868413b3be5b07a5b6dae9d73f71718ebb99844e0f072912490ef6f7696a/psqlpy-0.11.6-cp313-cp313-win_amd64.whl", hash = 
"sha256:8e601faf0e7cf771fa118e89487c805a53271185aea63025b18914feb982ee4a", size = 3767978, upload-time = "2025-08-14T17:22:02.793Z" }, + { url = "https://files.pythonhosted.org/packages/7d/ff/96d672cb6d62b593ec6d2a53072663b72f8b4cc7c782d9b16e63a23d1f8e/psqlpy-0.11.6-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3a36263ac9ed535cb066dd9ffa75eba7c2a3d622ebadf1ee90a1adaf33e1c3dd", size = 4310264, upload-time = "2025-08-14T17:22:04.118Z" }, + { url = "https://files.pythonhosted.org/packages/df/33/8011fc5457c94d7462f667a20a6959f44d3a96ae3f6f9c697974265b3590/psqlpy-0.11.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:efa42516e911ed9c76ab054fd061d2456f89f20d55807d645a9ad47571234b57", size = 4518025, upload-time = "2025-08-14T17:22:05.529Z" }, + { url = "https://files.pythonhosted.org/packages/95/50/63dccb212fda68b16a9d5169d306ac38d3efdba1b8b6cf74a2cd26979b61/psqlpy-0.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eaaa658b6204103ad7f03dbc762baed3803de9303d94eac551ac05c57758423", size = 5043163, upload-time = "2025-08-14T17:22:06.914Z" }, + { url = "https://files.pythonhosted.org/packages/d4/12/7900a705a990d7192f45ebf619affbf7dec4db54f3732a9a1616ee542f8a/psqlpy-0.11.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:392a763fc2c992d1e45c89a184d3de87189a9f247995d3e16f0871f67a67b781", size = 4299937, upload-time = "2025-08-14T17:22:08.351Z" }, + { url = "https://files.pythonhosted.org/packages/25/01/621c0d19c863019760a45e95f2e88dc007b12aaf6d8ee49f2957923ce218/psqlpy-0.11.6-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff00b4c53f6f188f48ea8d58f63f8699c623bb6ee302c99bf559f61bc78ca5b8", size = 4926194, upload-time = "2025-08-14T17:22:09.983Z" }, + { url = "https://files.pythonhosted.org/packages/7d/d5/343a6a0f3fa88ab2da42bb4b539d46e68c88f4912865e5963f05afee25d9/psqlpy-0.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d079161fdaf4e3d2c336309161cc8a1f60bda03562872c479e805f2ccf5e7ea0", size = 5042997, upload-time = "2025-08-14T17:22:11.648Z" }, + { url = "https://files.pythonhosted.org/packages/6b/83/32b399b3a2eecafa06afd5eec465f5abbeff90aa175ae98d9dde4079ea0b/psqlpy-0.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7afa10100009833a40fbc92993ce200d46081293472f6b68aefeafba206517b5", size = 4700624, upload-time = "2025-08-14T17:22:13.156Z" }, + { url = "https://files.pythonhosted.org/packages/5f/bd/841b47e7f2b08021c2cf42788c6a669e2e3ea2cb09cbe0e58b6ba7c76cfb/psqlpy-0.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:295df1f589e65e285e2cad23b4000572f083a563dd5a3ddd043de3a9e9027ab0", size = 4929964, upload-time = "2025-08-14T17:22:14.839Z" }, + { url = "https://files.pythonhosted.org/packages/9f/c7/9a7f39a74bd2f16f980b599f61c3afd55e245706f4d6d682e7621598c03e/psqlpy-0.11.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1ea482e2b4df2b7f5393c35f64927c4fc0ecaf57455f41127cdc78307bbb2320", size = 4958544, upload-time = "2025-08-14T17:22:16.471Z" }, + { url = "https://files.pythonhosted.org/packages/be/bc/a3186ce50e3fe76814c9cd1dc8267eb2d79015292ff346691c62e47379d7/psqlpy-0.11.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f231871393ff627fdb36607b8d4113bf901e3838bf573003ace6177260db61d4", size = 5069641, upload-time = "2025-08-14T17:22:17.969Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1c/d1d1b84a52949bb91f5a1c5208120b61b165fa24faafb8150709c241dae8/psqlpy-0.11.6-cp39-cp39-win32.whl", hash = 
"sha256:008407cac3591a7cdf45250727df2aaffe1e077aa40d0d5ef858998078c93c5f", size = 3352566, upload-time = "2025-08-14T17:22:19.526Z" }, + { url = "https://files.pythonhosted.org/packages/7c/01/5572e1e647971f0d4db0278256d57ea8ed80f57bddba9b14bd937ae9a9ae/psqlpy-0.11.6-cp39-cp39-win_amd64.whl", hash = "sha256:b6dad4f42b00b34b540cc487e923e181726b887074093e3cdea21d51fc67e3a7", size = 3761797, upload-time = "2025-08-14T17:22:21.09Z" }, + { url = "https://files.pythonhosted.org/packages/60/98/f29b17f931ea17ef673c8a2ac817901e663b2db102660a4902997859c2c2/psqlpy-0.11.6-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:80ed664f40b259f65c5983e529fd32212ffcb5caa5cddb8ed899f3939547c5c7", size = 4303600, upload-time = "2025-08-14T17:22:22.579Z" }, + { url = "https://files.pythonhosted.org/packages/f1/05/bd23d3c2a3254640f5205e71b41dbd8279b6dcb211d3a0834edc9e560ef5/psqlpy-0.11.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b01cc82cf9af3b5237d8059cdfcf41fce656d300f768e948c20358ce94774862", size = 4503850, upload-time = "2025-08-14T17:22:24.408Z" }, + { url = "https://files.pythonhosted.org/packages/94/7f/bcce7a63bc0fca8a43bf81abae97c6cc262c582b4a420252e1eb51c9fb41/psqlpy-0.11.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d2eb9dcb0a7f60d045bbe8061b8318b7718a4093eac9cd65fd3c00ebf1544b4", size = 5041335, upload-time = "2025-08-14T17:22:25.843Z" }, + { url = "https://files.pythonhosted.org/packages/dc/09/a385d371f55fb49bc52e0f25c6322b8d49960a080dae43a02f500183e3de/psqlpy-0.11.6-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e14a5539520854e1fd163016a325b39ddbd1edf621492d45383888667847575a", size = 4291607, upload-time = "2025-08-14T17:22:27.342Z" }, + { url = "https://files.pythonhosted.org/packages/5d/2c/86d84124670a1e8900be9f22e662ceda30b110d79229a1255871f1d8f402/psqlpy-0.11.6-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e706d106ca3ce4e67155a229be6e58cf0dcab354be41b2f5a6faf384cc1b049", size = 4921673, upload-time = "2025-08-14T17:22:28.805Z" }, + { url = "https://files.pythonhosted.org/packages/f7/02/89914a07c683c61d128cc341b598e7b3352a6c1e94720033bec88e019dae/psqlpy-0.11.6-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60d4afcf550f8bd2a3ad131b95ad1203149109bc40d07351c4619f79829ed411", size = 5043783, upload-time = "2025-08-14T17:22:30.322Z" }, + { url = "https://files.pythonhosted.org/packages/45/c0/67d3c8c3b5ee9fa562ac48792c84b3a0c8460e13424eb80339aa19dc5ff1/psqlpy-0.11.6-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:809b2cac178d4a707c025963ad93dc5ceef05772f3ee81a1fba664c639eeabcf", size = 4684899, upload-time = "2025-08-14T17:22:32.054Z" }, + { url = "https://files.pythonhosted.org/packages/7c/81/6be3237e807799af8c82634d1c1888c8b4206f4316426b0dbe836dfa2016/psqlpy-0.11.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25f2a6551b8b4ae61e5993705ed8a53d3d55f02036b473379e2c3a8208cbcfc", size = 4933885, upload-time = "2025-08-14T17:22:33.627Z" }, + { url = "https://files.pythonhosted.org/packages/a5/2d/fcc6d54d78635b559ed651ee31192c695549deef40e3f9e2a3e5a99198da/psqlpy-0.11.6-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a94ed32504d9193a8fa77fd98e9abb7f05d757f75af8e716e67f9a7876449d06", size = 4954441, upload-time = "2025-08-14T17:22:35.12Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/dc/fb1a8d1f46d35d104a78799ef28ed0e501a92d728a0dc8840bc1cdf550d5/psqlpy-0.11.6-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e4038d7ccc6aef8d72f64d9c1a69be0174b7f014a01b7a4193b2b9ddd5d3606e", size = 5073296, upload-time = "2025-08-14T17:22:36.899Z" }, + { url = "https://files.pythonhosted.org/packages/73/a1/79aa0ab1765ae842438e4d95cfa5aba092a559a9df53730702a8a4b4729f/psqlpy-0.11.6-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:732d6a85c1a69542eebd26fb5b3062eb496cb8d4577d814b6ec523a375b9c0e1", size = 4304262, upload-time = "2025-08-14T17:22:38.463Z" }, + { url = "https://files.pythonhosted.org/packages/4d/c6/b265bd820029867d2ae0610b26c220bf67227be2f5748ba769c59d669556/psqlpy-0.11.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:08a29fd10449d4784c8d90a34265a9f642eec7ede5a04eba3fc714ce13e3531d", size = 4503291, upload-time = "2025-08-14T17:22:39.896Z" }, + { url = "https://files.pythonhosted.org/packages/d0/8a/6f0ca1741f7760183586a5b7dbb2026cb0e39aa53ec6d5959abe965b9d93/psqlpy-0.11.6-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4a43c734f27d1dfcc68d45a4462b6274d085f6cafdd7d0699bc10306f956dee", size = 5041363, upload-time = "2025-08-14T17:22:41.35Z" }, + { url = "https://files.pythonhosted.org/packages/51/eb/2494e4369c09e7a9f4db2181eebbcf6abdd31cc56b5c140036b4cd927296/psqlpy-0.11.6-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dbcf90d8f3910a1d5039367787bd5a5e354bcba5e2d061c0273c476b68ea8834", size = 4293301, upload-time = "2025-08-14T17:22:43.092Z" }, + { url = "https://files.pythonhosted.org/packages/a5/42/1d2c9256bcfc9ecd190d4deb7a89eb19acd85b66aba205b829ad5a19efda/psqlpy-0.11.6-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:463cfdf4a0c84f8d9a6a024a47d1a963db4321cfdca5e9f0c943c91264a64eeb", size = 4922220, upload-time = "2025-08-14T17:22:44.537Z" }, + { url = "https://files.pythonhosted.org/packages/03/75/8385f59670aaeeb2acd091978505605a12bede7a3de6c66f514bcd22b8c3/psqlpy-0.11.6-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f601e96a5d11c81dab2c0de3451203a7c5d148f313fc9774dfd258d4683ea78c", size = 5043188, upload-time = "2025-08-14T17:22:46.103Z" }, + { url = "https://files.pythonhosted.org/packages/6d/d5/383b4d9ff35af7dab12532d4b0bb9c2101ad0bf8c817d3016f6a39e55fb1/psqlpy-0.11.6-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:782b0eed2587e91a46dd54300273bbfc55a5d358fd0d9d28311647e2e8412e03", size = 4703050, upload-time = "2025-08-14T17:22:47.852Z" }, + { url = "https://files.pythonhosted.org/packages/2e/52/a13570a371a9f268ded0ee6c2534ea3655878bccea905c1149a351f1ab6b/psqlpy-0.11.6-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0c63c9a41ec6a6abde2c7b9dcb271c08896259911e4268124bfdd632f2c7435", size = 4933316, upload-time = "2025-08-14T17:22:49.418Z" }, + { url = "https://files.pythonhosted.org/packages/11/c4/3c51b817ebc8b8344cc27d68c3d2fec00e0291ce9350acd2a5e16144706c/psqlpy-0.11.6-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:33768b71afde498211f271f5ea0c430ed8ddfdc7ee5f541de4761222d8633c3f", size = 4956086, upload-time = "2025-08-14T17:22:51.008Z" }, + { url = "https://files.pythonhosted.org/packages/b9/bc/2f6e5ba5ca7890c56bfe59b95536557f15ddc17171234edda342e53415c7/psqlpy-0.11.6-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = 
"sha256:820fbeac25919fb463de3a50594ee12cd434fa539e483adfda87404dfa602b73", size = 5073593, upload-time = "2025-08-14T17:22:52.906Z" }, + { url = "https://files.pythonhosted.org/packages/da/28/de38383c0c01643515df8a067db96b1be03a9f08d273865fa089b61c9a27/psqlpy-0.11.6-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:713ab646c6eb4a79bb012fceddc8261d8945f973a4e569ed66bd75efea538145", size = 4303717, upload-time = "2025-08-14T17:22:54.818Z" }, + { url = "https://files.pythonhosted.org/packages/66/01/82f4d27a6b34b1e451a584d42f7a3cc59cdafa8fa4e4a19fe5ff06d46cd0/psqlpy-0.11.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c5edca9a74a5321049fd26317941fe997a7f860ee5c18fa1210ac57849b1fa50", size = 4502218, upload-time = "2025-08-14T17:22:56.401Z" }, + { url = "https://files.pythonhosted.org/packages/9a/38/1fee83a44052ce338838839574fc6247b73809093d8ec1045e01943a8898/psqlpy-0.11.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3751b11b9852204d9d7f9a4a0013885e50b1efe9c519705ea3d48681c517dd9a", size = 5042449, upload-time = "2025-08-14T17:22:58.149Z" }, + { url = "https://files.pythonhosted.org/packages/30/92/2d7822567f94221205057ba6c31b5e300dc092a2774574b14e340185ce3a/psqlpy-0.11.6-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a218444193d30305088fb10b5f914454213d30ce72ec1fbc2d704955611a5d6f", size = 4294799, upload-time = "2025-08-14T17:22:59.711Z" }, + { url = "https://files.pythonhosted.org/packages/26/41/8f62d6fee34d829000e9c7b6ded565af6e477bf8acc5d86c05be85cc0db3/psqlpy-0.11.6-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:726ff110b1165ad500f8f815710d6c74e2f80f96f8e6837e01fe0a0e5d70584a", size = 4921904, upload-time = "2025-08-14T17:23:01.226Z" }, + { url = "https://files.pythonhosted.org/packages/cd/f7/7e0250d752b3a9e80546ab80083c3c1fb783cda41af0a8ab4292a795e455/psqlpy-0.11.6-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f84052981060673020c9b0fe0531df067df079350b9155a600f310915f738b27", size = 5044055, upload-time = "2025-08-14T17:23:02.871Z" }, + { url = "https://files.pythonhosted.org/packages/af/b6/a73c0627d2b522929762fa471df25da15b174a61930fca6bf3ee9e46e3e8/psqlpy-0.11.6-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3607ba741ada381bb672618c0fe2cc8e0c4bc559cdd444d17c9079947ad94fa8", size = 4684160, upload-time = "2025-08-14T17:23:04.702Z" }, + { url = "https://files.pythonhosted.org/packages/ad/d7/d76c863875b0e7385879a0502d2f67f848bc5f94da9634633966d467f0cb/psqlpy-0.11.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e0ee9c153fb7500da1d717faecd28d99994cb8419dbe0be4b89e8821b4e9291", size = 4934082, upload-time = "2025-08-14T17:23:06.524Z" }, + { url = "https://files.pythonhosted.org/packages/23/40/613fe3a2139e131e84d138ff9e05ff6194790a8fe402bc0e801b3abb8b42/psqlpy-0.11.6-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:f06cd4e031364c9faa41be8481bcce2876eb7ba1b1b769aa0edb05af17af3b1b", size = 4954714, upload-time = "2025-08-14T17:23:08.176Z" }, + { url = "https://files.pythonhosted.org/packages/4d/04/3abf261a6ad16c92c1b14ae0858ae349c3836820e8aae0d507094fa127df/psqlpy-0.11.6-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:d4a128962d01a82a9e0447e4626916d2f1ae9722d0c7898009d8c2a6e0d0ea2b", size = 5073280, upload-time = "2025-08-14T17:23:09.706Z" }, ] [[package]] @@ -4011,16 +4011,15 @@ wheels = [ [[package]] name = "pytest-sugar" -version = "1.0.0" +version = 
"1.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "packaging" }, { name = "pytest" }, { name = "termcolor" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f5/ac/5754f5edd6d508bc6493bc37d74b928f102a5fff82d9a80347e180998f08/pytest-sugar-1.0.0.tar.gz", hash = "sha256:6422e83258f5b0c04ce7c632176c7732cab5fdb909cb39cca5c9139f81276c0a", size = 14992, upload-time = "2024-02-01T18:30:36.735Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c8/fe/012ae5c8cd4592d71e77c992a965064724269f4e60e377d5ce7b5ae01a19/pytest-sugar-1.1.0.tar.gz", hash = "sha256:53138645cabc311a677bb09c307eab41681a563e498318cd2a0d7cd184837af4", size = 16331, upload-time = "2025-08-16T16:49:45.568Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/92/fb/889f1b69da2f13691de09a111c16c4766a433382d44aa0ecf221deded44a/pytest_sugar-1.0.0-py3-none-any.whl", hash = "sha256:70ebcd8fc5795dc457ff8b69d266a4e2e8a74ae0c3edc749381c64b5246c8dfd", size = 10171, upload-time = "2024-02-01T18:30:29.395Z" }, + { url = "https://files.pythonhosted.org/packages/ac/13/4d703e1c389de100a4a943a4d1b2a315b787dffaff643fdaa0ffa13f985a/pytest_sugar-1.1.0-py3-none-any.whl", hash = "sha256:c853866512288f1b679efc10c565303de4617854287e977781f07904f4560668", size = 11409, upload-time = "2025-08-16T16:49:44.601Z" }, ] [[package]] @@ -4287,27 +4286,28 @@ wheels = [ [[package]] name = "ruff" -version = "0.12.8" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4b/da/5bd7565be729e86e1442dad2c9a364ceeff82227c2dece7c29697a9795eb/ruff-0.12.8.tar.gz", hash = "sha256:4cb3a45525176e1009b2b64126acf5f9444ea59066262791febf55e40493a033", size = 5242373, upload-time = "2025-08-07T19:05:47.268Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c9/1e/c843bfa8ad1114fab3eb2b78235dda76acd66384c663a4e0415ecc13aa1e/ruff-0.12.8-py3-none-linux_armv6l.whl", hash = "sha256:63cb5a5e933fc913e5823a0dfdc3c99add73f52d139d6cd5cc8639d0e0465513", size = 11675315, upload-time = "2025-08-07T19:05:06.15Z" }, - { url = "https://files.pythonhosted.org/packages/24/ee/af6e5c2a8ca3a81676d5480a1025494fd104b8896266502bb4de2a0e8388/ruff-0.12.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9a9bbe28f9f551accf84a24c366c1aa8774d6748438b47174f8e8565ab9dedbc", size = 12456653, upload-time = "2025-08-07T19:05:09.759Z" }, - { url = "https://files.pythonhosted.org/packages/99/9d/e91f84dfe3866fa648c10512904991ecc326fd0b66578b324ee6ecb8f725/ruff-0.12.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:2fae54e752a3150f7ee0e09bce2e133caf10ce9d971510a9b925392dc98d2fec", size = 11659690, upload-time = "2025-08-07T19:05:12.551Z" }, - { url = "https://files.pythonhosted.org/packages/fe/ac/a363d25ec53040408ebdd4efcee929d48547665858ede0505d1d8041b2e5/ruff-0.12.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0acbcf01206df963d9331b5838fb31f3b44fa979ee7fa368b9b9057d89f4a53", size = 11896923, upload-time = "2025-08-07T19:05:14.821Z" }, - { url = "https://files.pythonhosted.org/packages/58/9f/ea356cd87c395f6ade9bb81365bd909ff60860975ca1bc39f0e59de3da37/ruff-0.12.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae3e7504666ad4c62f9ac8eedb52a93f9ebdeb34742b8b71cd3cccd24912719f", size = 11477612, upload-time = "2025-08-07T19:05:16.712Z" }, - { url = "https://files.pythonhosted.org/packages/1a/46/92e8fa3c9dcfd49175225c09053916cb97bb7204f9f899c2f2baca69e450/ruff-0.12.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:cb82efb5d35d07497813a1c5647867390a7d83304562607f3579602fa3d7d46f", size = 13182745, upload-time = "2025-08-07T19:05:18.709Z" }, - { url = "https://files.pythonhosted.org/packages/5e/c4/f2176a310f26e6160deaf661ef60db6c3bb62b7a35e57ae28f27a09a7d63/ruff-0.12.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:dbea798fc0065ad0b84a2947b0aff4233f0cb30f226f00a2c5850ca4393de609", size = 14206885, upload-time = "2025-08-07T19:05:21.025Z" }, - { url = "https://files.pythonhosted.org/packages/87/9d/98e162f3eeeb6689acbedbae5050b4b3220754554526c50c292b611d3a63/ruff-0.12.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:49ebcaccc2bdad86fd51b7864e3d808aad404aab8df33d469b6e65584656263a", size = 13639381, upload-time = "2025-08-07T19:05:23.423Z" }, - { url = "https://files.pythonhosted.org/packages/81/4e/1b7478b072fcde5161b48f64774d6edd59d6d198e4ba8918d9f4702b8043/ruff-0.12.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ac9c570634b98c71c88cb17badd90f13fc076a472ba6ef1d113d8ed3df109fb", size = 12613271, upload-time = "2025-08-07T19:05:25.507Z" }, - { url = "https://files.pythonhosted.org/packages/e8/67/0c3c9179a3ad19791ef1b8f7138aa27d4578c78700551c60d9260b2c660d/ruff-0.12.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:560e0cd641e45591a3e42cb50ef61ce07162b9c233786663fdce2d8557d99818", size = 12847783, upload-time = "2025-08-07T19:05:28.14Z" }, - { url = "https://files.pythonhosted.org/packages/4e/2a/0b6ac3dd045acf8aa229b12c9c17bb35508191b71a14904baf99573a21bd/ruff-0.12.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:71c83121512e7743fba5a8848c261dcc454cafb3ef2934a43f1b7a4eb5a447ea", size = 11702672, upload-time = "2025-08-07T19:05:30.413Z" }, - { url = "https://files.pythonhosted.org/packages/9d/ee/f9fdc9f341b0430110de8b39a6ee5fa68c5706dc7c0aa940817947d6937e/ruff-0.12.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:de4429ef2ba091ecddedd300f4c3f24bca875d3d8b23340728c3cb0da81072c3", size = 11440626, upload-time = "2025-08-07T19:05:32.492Z" }, - { url = "https://files.pythonhosted.org/packages/89/fb/b3aa2d482d05f44e4d197d1de5e3863feb13067b22c571b9561085c999dc/ruff-0.12.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a2cab5f60d5b65b50fba39a8950c8746df1627d54ba1197f970763917184b161", size = 12462162, upload-time = "2025-08-07T19:05:34.449Z" }, - { url = "https://files.pythonhosted.org/packages/18/9f/5c5d93e1d00d854d5013c96e1a92c33b703a0332707a7cdbd0a4880a84fb/ruff-0.12.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:45c32487e14f60b88aad6be9fd5da5093dbefb0e3e1224131cb1d441d7cb7d46", size = 12913212, upload-time = "2025-08-07T19:05:36.541Z" }, - { url = "https://files.pythonhosted.org/packages/71/13/ab9120add1c0e4604c71bfc2e4ef7d63bebece0cfe617013da289539cef8/ruff-0.12.8-py3-none-win32.whl", hash = "sha256:daf3475060a617fd5bc80638aeaf2f5937f10af3ec44464e280a9d2218e720d3", size = 11694382, upload-time = "2025-08-07T19:05:38.468Z" }, - { url = "https://files.pythonhosted.org/packages/f6/dc/a2873b7c5001c62f46266685863bee2888caf469d1edac84bf3242074be2/ruff-0.12.8-py3-none-win_amd64.whl", hash = "sha256:7209531f1a1fcfbe8e46bcd7ab30e2f43604d8ba1c49029bb420b103d0b5f76e", size = 12740482, upload-time = "2025-08-07T19:05:40.391Z" }, - { url = "https://files.pythonhosted.org/packages/cb/5c/799a1efb8b5abab56e8a9f2a0b72d12bd64bb55815e9476c7d0a2887d2f7/ruff-0.12.8-py3-none-win_arm64.whl", hash = "sha256:c90e1a334683ce41b0e7a04f41790c429bf5073b62c1ae701c9dc5b3d14f0749", size = 11884718, upload-time = 
"2025-08-07T19:05:42.866Z" }, +version = "0.12.9" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4a/45/2e403fa7007816b5fbb324cb4f8ed3c7402a927a0a0cb2b6279879a8bfdc/ruff-0.12.9.tar.gz", hash = "sha256:fbd94b2e3c623f659962934e52c2bea6fc6da11f667a427a368adaf3af2c866a", size = 5254702, upload-time = "2025-08-14T16:08:55.2Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ad/20/53bf098537adb7b6a97d98fcdebf6e916fcd11b2e21d15f8c171507909cc/ruff-0.12.9-py3-none-linux_armv6l.whl", hash = "sha256:fcebc6c79fcae3f220d05585229463621f5dbf24d79fdc4936d9302e177cfa3e", size = 11759705, upload-time = "2025-08-14T16:08:12.968Z" }, + { url = "https://files.pythonhosted.org/packages/20/4d/c764ee423002aac1ec66b9d541285dd29d2c0640a8086c87de59ebbe80d5/ruff-0.12.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:aed9d15f8c5755c0e74467731a007fcad41f19bcce41cd75f768bbd687f8535f", size = 12527042, upload-time = "2025-08-14T16:08:16.54Z" }, + { url = "https://files.pythonhosted.org/packages/8b/45/cfcdf6d3eb5fc78a5b419e7e616d6ccba0013dc5b180522920af2897e1be/ruff-0.12.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5b15ea354c6ff0d7423814ba6d44be2807644d0c05e9ed60caca87e963e93f70", size = 11724457, upload-time = "2025-08-14T16:08:18.686Z" }, + { url = "https://files.pythonhosted.org/packages/72/e6/44615c754b55662200c48bebb02196dbb14111b6e266ab071b7e7297b4ec/ruff-0.12.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d596c2d0393c2502eaabfef723bd74ca35348a8dac4267d18a94910087807c53", size = 11949446, upload-time = "2025-08-14T16:08:21.059Z" }, + { url = "https://files.pythonhosted.org/packages/fd/d1/9b7d46625d617c7df520d40d5ac6cdcdf20cbccb88fad4b5ecd476a6bb8d/ruff-0.12.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1b15599931a1a7a03c388b9c5df1bfa62be7ede6eb7ef753b272381f39c3d0ff", size = 11566350, upload-time = "2025-08-14T16:08:23.433Z" }, + { url = "https://files.pythonhosted.org/packages/59/20/b73132f66f2856bc29d2d263c6ca457f8476b0bbbe064dac3ac3337a270f/ruff-0.12.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d02faa2977fb6f3f32ddb7828e212b7dd499c59eb896ae6c03ea5c303575756", size = 13270430, upload-time = "2025-08-14T16:08:25.837Z" }, + { url = "https://files.pythonhosted.org/packages/a2/21/eaf3806f0a3d4c6be0a69d435646fba775b65f3f2097d54898b0fd4bb12e/ruff-0.12.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:17d5b6b0b3a25259b69ebcba87908496e6830e03acfb929ef9fd4c58675fa2ea", size = 14264717, upload-time = "2025-08-14T16:08:27.907Z" }, + { url = "https://files.pythonhosted.org/packages/d2/82/1d0c53bd37dcb582b2c521d352fbf4876b1e28bc0d8894344198f6c9950d/ruff-0.12.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:72db7521860e246adbb43f6ef464dd2a532ef2ef1f5dd0d470455b8d9f1773e0", size = 13684331, upload-time = "2025-08-14T16:08:30.352Z" }, + { url = "https://files.pythonhosted.org/packages/3b/2f/1c5cf6d8f656306d42a686f1e207f71d7cebdcbe7b2aa18e4e8a0cb74da3/ruff-0.12.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a03242c1522b4e0885af63320ad754d53983c9599157ee33e77d748363c561ce", size = 12739151, upload-time = "2025-08-14T16:08:32.55Z" }, + { url = "https://files.pythonhosted.org/packages/47/09/25033198bff89b24d734e6479e39b1968e4c992e82262d61cdccaf11afb9/ruff-0.12.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9fc83e4e9751e6c13b5046d7162f205d0a7bac5840183c5beebf824b08a27340", size = 12954992, upload-time = "2025-08-14T16:08:34.816Z" }, + { url = "https://files.pythonhosted.org/packages/52/8e/d0dbf2f9dca66c2d7131feefc386523404014968cd6d22f057763935ab32/ruff-0.12.9-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:881465ed56ba4dd26a691954650de6ad389a2d1fdb130fe51ff18a25639fe4bb", size = 12899569, upload-time = "2025-08-14T16:08:36.852Z" }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b614d7c08515b1428ed4d3f1d4e3d687deffb2479703b90237682586fa66/ruff-0.12.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:43f07a3ccfc62cdb4d3a3348bf0588358a66da756aa113e071b8ca8c3b9826af", size = 11751983, upload-time = "2025-08-14T16:08:39.314Z" }, + { url = "https://files.pythonhosted.org/packages/58/d6/383e9f818a2441b1a0ed898d7875f11273f10882f997388b2b51cb2ae8b5/ruff-0.12.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:07adb221c54b6bba24387911e5734357f042e5669fa5718920ee728aba3cbadc", size = 11538635, upload-time = "2025-08-14T16:08:41.297Z" }, + { url = "https://files.pythonhosted.org/packages/20/9c/56f869d314edaa9fc1f491706d1d8a47747b9d714130368fbd69ce9024e9/ruff-0.12.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:f5cd34fabfdea3933ab85d72359f118035882a01bff15bd1d2b15261d85d5f66", size = 12534346, upload-time = "2025-08-14T16:08:43.39Z" }, + { url = "https://files.pythonhosted.org/packages/bd/4b/d8b95c6795a6c93b439bc913ee7a94fda42bb30a79285d47b80074003ee7/ruff-0.12.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:f6be1d2ca0686c54564da8e7ee9e25f93bdd6868263805f8c0b8fc6a449db6d7", size = 13017021, upload-time = "2025-08-14T16:08:45.889Z" }, + { url = "https://files.pythonhosted.org/packages/c7/c1/5f9a839a697ce1acd7af44836f7c2181cdae5accd17a5cb85fcbd694075e/ruff-0.12.9-py3-none-win32.whl", hash = "sha256:cc7a37bd2509974379d0115cc5608a1a4a6c4bff1b452ea69db83c8855d53f93", size = 11734785, upload-time = "2025-08-14T16:08:48.062Z" }, + { url = "https://files.pythonhosted.org/packages/fa/66/cdddc2d1d9a9f677520b7cfc490d234336f523d4b429c1298de359a3be08/ruff-0.12.9-py3-none-win_amd64.whl", hash = "sha256:6fb15b1977309741d7d098c8a3cb7a30bc112760a00fb6efb7abc85f00ba5908", size = 12840654, upload-time = "2025-08-14T16:08:50.158Z" }, + { url = "https://files.pythonhosted.org/packages/ac/fd/669816bc6b5b93b9586f3c1d87cd6bc05028470b3ecfebb5938252c47a35/ruff-0.12.9-py3-none-win_arm64.whl", hash = "sha256:63c8c819739d86b96d500cce885956a1a48ab056bbcbc61b747ad494b2485089", size = 11949623, upload-time = "2025-08-14T16:08:52.233Z" }, ] [[package]] @@ -4321,16 +4321,16 @@ wheels = [ [[package]] name = "shibuya" -version = "2025.7.24" +version = "2025.8.16" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9a/1f/6714f92988e31dc9c59fe1abd84daaa8bd41747d0165b6b96a16d9beaea2/shibuya-2025.7.24.tar.gz", hash = "sha256:c4774702acc11a04847d3ae822b25e71429288cecdb69089ece00b33f7767168", size = 80851, upload-time = "2025-07-24T01:07:32.834Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/73/42/497cb621cccd43ca81dba8dc1d4fed1099cbcde3eea7e4e0fbfd1222c212/shibuya-2025.8.16.tar.gz", hash = "sha256:1c639cf646a33026b48eee5c29fd51476e767709f7a87f5656d21d2665d7ee22", size = 80785, upload-time = "2025-08-16T13:59:27.722Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/37/030645781f2219b3382bfe2061c0956303f8c59be33f192b3dbc9914750f/shibuya-2025.7.24-py3-none-any.whl", hash = "sha256:4b10f903e9f8dfbe1c511cb116b2737033764d123ec988afd5eec0d484aac95f", size = 96555, upload-time = "2025-07-24T01:07:31.272Z" }, + { url = "https://files.pythonhosted.org/packages/d9/fc/bc2094308aed85b6d5e098c44bf1dce7db98f0635138cf27d82f638afd43/shibuya-2025.8.16-py3-none-any.whl", hash = "sha256:90739b5d14ac38b44b99a392b9b0be7e83137890f36420d52a2079cc29fc1a74", size = 96467, upload-time = "2025-08-16T13:59:26.131Z" }, ] [[package]] @@ -4957,7 +4957,7 @@ wheels = [ [[package]] name = "sqlspec" -version = "0.17.1" +version = "0.18.0" source = { editable = "." } dependencies = [ { name = "eval-type-backport", marker = "python_full_version < '3.10'" },