diff --git a/.claude/agents/docs-vision.md b/.claude/agents/docs-vision.md
index faab1d115..22c312b38 100644
--- a/.claude/agents/docs-vision.md
+++ b/.claude/agents/docs-vision.md
@@ -83,7 +83,7 @@ from sqlspec.adapters.asyncpg.config import AsyncpgConfig
 
 config = AsyncpgConfig(
-    dsn="postgresql://user:pass@localhost/db",
-    pool_config={
+    connection_config={
+        "dsn": "postgresql://user:pass@localhost/db",
         "min_size": 10,
         "max_size": 20,
         "max_inactive_connection_lifetime": 300
@@ -393,7 +393,7 @@ When implementing optional type handlers (NumPy, pgvector, etc.):
 
 ```python
 class AdapterConfig(AsyncDatabaseConfig):
     async def _create_pool(self):
-        config = dict(self.pool_config)
+        config = dict(self.connection_config)
 
         if self.driver_features.get("enable_feature", False):
             config["session_callback"] = self._init_connection
diff --git a/.claude/agents/expert.md b/.claude/agents/expert.md
index d2956c25b..c5c77e566 100644
--- a/.claude/agents/expert.md
+++ b/.claude/agents/expert.md
@@ -559,7 +559,7 @@ mcp__context7__get-library-docs(
 Edit(
     file_path="sqlspec/adapters/asyncpg/config.py",
     old_string="# TODO: Add pooling",
-    new_string="pool = await asyncpg.create_pool(**pool_config)"
+    new_string="pool = await asyncpg.create_pool(**connection_config)"
 )
 
 # 4. Test locally
diff --git a/.claude/commands/bootstrap.md b/.claude/commands/bootstrap.md
index ff27e52de..0f0e14280 100644
--- a/.claude/commands/bootstrap.md
+++ b/.claude/commands/bootstrap.md
@@ -359,7 +359,7 @@ Database-specific implementation for {adapter_name}.
 from sqlspec.adapters.{adapter_name} import {adapter_name.capitalize()}Config
 
 config = {adapter_name.capitalize()}Config(
-    pool_config={{"dsn": "..."}},
+    connection_config={{"dsn": "..."}},
     driver_features={{}}
 )
 ```
@@ -419,7 +419,7 @@ pip install sqlspec[{adapter_name}]
 from sqlspec.adapters.{adapter_name} import {adapter_name.capitalize()}Config
 
 config = {adapter_name.capitalize()}Config(
-    pool_config={{"dsn": "..."}}
+    connection_config={{"dsn": "..."}}
 )
 ```
diff --git a/.claude/commands/explore.md b/.claude/commands/explore.md
index 2c3e3f382..1d7f5e910 100644
--- a/.claude/commands/explore.md
+++ b/.claude/commands/explore.md
@@ -276,7 +276,7 @@ Show minimal working examples:
 from sqlspec.adapters.asyncpg import AsyncpgConfig
 
 config = AsyncpgConfig(
-    pool_config={"dsn": "postgresql://..."},
+    connection_config={"dsn": "postgresql://..."},
     parameter_profile=ParameterProfile(
         style="numbered",
         prefix="$"
@@ -425,7 +425,7 @@ ParameterProfile definitions and the convert_parameters() function.
```python from sqlspec.adapters.asyncpg import AsyncpgConfig -config = AsyncpgConfig(pool_config={"dsn": "postgresql://..."}) +config = AsyncpgConfig(connection_config={"dsn": "postgresql://..."}) # Input: "SELECT * FROM users WHERE id = :id" # Output: "SELECT * FROM users WHERE id = $1" diff --git a/.claude/skills/README.md b/.claude/skills/README.md index 2c8331814..22a0c1a52 100644 --- a/.claude/skills/README.md +++ b/.claude/skills/README.md @@ -37,7 +37,7 @@ Detailed reference guides for specific SQLSpec usage patterns: | Guide | Purpose | Key Topics | |-------|---------|-----------| -| [configuration.md](sqlspec-usage/patterns/configuration.md) | Configuration across all adapters | pool_config, driver_features, extension_config, multi-database | +| [configuration.md](sqlspec-usage/patterns/configuration.md) | Configuration across all adapters | connection_config, driver_features, extension_config, multi-database | | [queries.md](sqlspec-usage/patterns/queries.md) | Query execution patterns | Parameter binding, result handling, transactions, type mapping | | [frameworks.md](sqlspec-usage/patterns/frameworks.md) | Framework integration | Litestar, FastAPI, Starlette, Flask patterns | | [migrations.md](sqlspec-usage/patterns/migrations.md) | Database migrations | CLI commands, hybrid versioning, programmatic control | @@ -127,7 +127,7 @@ The `.claude/bootstrap.md` includes automatic skill creation for SQLSpec project All skills include sections on anti-patterns to avoid: -- Configuration mistakes (missing pool_config, duplicate session keys) +- Configuration mistakes (missing connection_config, duplicate session keys) - Session management errors (no context managers, mixing sync/async) - Query execution issues (SQL injection, wrong parameter style) - Framework integration problems (duplicate keys, missing middleware) diff --git a/.claude/skills/sqlspec_adapters/aiosqlite.md b/.claude/skills/sqlspec_adapters/aiosqlite.md index 7f6dd8618..e9bcd182e 100644 --- a/.claude/skills/sqlspec_adapters/aiosqlite.md +++ b/.claude/skills/sqlspec_adapters/aiosqlite.md @@ -30,7 +30,7 @@ from sqlspec.adapters.aiosqlite import ( ) config = AiosqliteConfig( - pool_config={ + connection_config={ # Database path "database": "file::memory:?cache=shared", # Default shared memory # OR: "app.db", # File-based database @@ -92,7 +92,7 @@ result = await session.execute( ```python # Configure async pool config = AiosqliteConfig( - pool_config={ + connection_config={ "database": "app.db", "pool_size": 10, # 10 concurrent connections "connect_timeout": 30.0, # Wait up to 30s for connection @@ -114,7 +114,7 @@ await config.close_pool() ```python # Shared memory database (default) config = AiosqliteConfig( - pool_config={ + connection_config={ "database": "file::memory:?cache=shared", # All connections see same data "uri": True, } @@ -216,7 +216,7 @@ from litestar.contrib.sqlspec import SQLSpecConfig, SQLSpecPlugin sqlspec_config = SQLSpecConfig( configs=[ AiosqliteConfig( - pool_config={"database": "app.db", "pool_size": 10}, + connection_config={"database": "app.db", "pool_size": 10}, extension_config={ "litestar": { "commit_mode": "autocommit", @@ -251,7 +251,7 @@ from fastapi import FastAPI, Depends from contextlib import asynccontextmanager config = AiosqliteConfig( - pool_config={"database": "app.db"} + connection_config={"database": "app.db"} ) @asynccontextmanager @@ -283,7 +283,7 @@ from starlette.requests import Request from starlette.responses import JSONResponse config = AiosqliteConfig( - 
pool_config={"database": "app.db"}, + connection_config={"database": "app.db"}, extension_config={ "starlette": { "commit_mode": "autocommit", @@ -328,7 +328,7 @@ polars_df = pl.from_arrow(arrow_table) ```python # Optimize pool for workload config = AiosqliteConfig( - pool_config={ + connection_config={ "database": "app.db", "pool_size": 20, # High concurrency "connect_timeout": 60.0, # Long timeout for slow startup @@ -342,7 +342,7 @@ config = AiosqliteConfig( ```python config = AiosqliteConfig( - pool_config={"database": "app.db"} + connection_config={"database": "app.db"} ) # Enable WAL mode on startup @@ -356,7 +356,7 @@ async with config.provide_session() as session: ```python config = AiosqliteConfig( - pool_config={ + connection_config={ "cached_statements": 256, # Cache 256 prepared statements } ) @@ -388,7 +388,7 @@ for user_id in range(1000): Enable WAL mode or increase timeout: ```python config = AiosqliteConfig( - pool_config={ + connection_config={ "timeout": 30.0, # Wait longer for locks } ) @@ -403,7 +403,7 @@ async with config.provide_session() as session: Increase pool size: ```python config = AiosqliteConfig( - pool_config={ + connection_config={ "pool_size": 20, # More connections "connect_timeout": 60.0, # Wait longer } @@ -415,7 +415,7 @@ config = AiosqliteConfig( Increase operation timeout for slow queries: ```python config = AiosqliteConfig( - pool_config={ + connection_config={ "operation_timeout": 30.0, # 30s for slow queries } ) diff --git a/.claude/skills/sqlspec_adapters/asyncmy.md b/.claude/skills/sqlspec_adapters/asyncmy.md index 8a9018357..c196bd34f 100644 --- a/.claude/skills/sqlspec_adapters/asyncmy.md +++ b/.claude/skills/sqlspec_adapters/asyncmy.md @@ -26,7 +26,7 @@ This adapter provides high-performance asynchronous connectivity to MySQL 5.7+, from sqlspec.adapters.asyncmy import AsyncmyConfig, AsyncmyDriverFeatures config = AsyncmyConfig( - pool_config={ + connection_config={ # Connection parameters: "host": "localhost", "port": 3306, @@ -61,7 +61,7 @@ async with config.provide_session() as session: ```python config = AsyncmyConfig( - pool_config={ + connection_config={ "host": "mysql.example.com", "port": 3306, "user": "myuser", @@ -81,7 +81,7 @@ config = AsyncmyConfig( ```python config = AsyncmyConfig( - pool_config={ + connection_config={ "unix_socket": "/var/run/mysqld/mysqld.sock", "user": "myuser", "password": "mypass", @@ -96,7 +96,7 @@ config = AsyncmyConfig( from asyncmy.cursors import DictCursor config = AsyncmyConfig( - pool_config={ + connection_config={ "host": "localhost", "user": "myuser", "password": "mypass", @@ -182,7 +182,7 @@ def orjson_deserializer(s): return orjson.loads(s) config = AsyncmyConfig( - pool_config={...}, + connection_config={...}, driver_features={ "json_serializer": orjson_serializer, "json_deserializer": orjson_deserializer, @@ -223,7 +223,7 @@ Asyncmy provides async connection pooling for high concurrency: ```python config = AsyncmyConfig( - pool_config={ + connection_config={ "host": "localhost", "user": "myuser", "password": "mypass", @@ -377,7 +377,7 @@ result = await session.execute(""" ```python # Reduce pool size config = AsyncmyConfig( - pool_config={ + connection_config={ "maxsize": 10, # Reduce from 20 } ) @@ -394,7 +394,7 @@ config = AsyncmyConfig( ```python # Increase timeouts config = AsyncmyConfig( - pool_config={ + connection_config={ "connect_timeout": 30, # Longer connect timeout "pool_recycle": 1800, # Recycle more frequently } @@ -432,7 +432,7 @@ config = AsyncmyConfig( ```python # Use utf8mb4 for 
full Unicode support config = AsyncmyConfig( - pool_config={ + connection_config={ "charset": "utf8mb4", } ) @@ -451,7 +451,7 @@ await session.execute(""" ```python # Verify SSL configuration config = AsyncmyConfig( - pool_config={ + connection_config={ "ssl": { "ca": "/path/to/ca-cert.pem", # Use absolute path "check_hostname": True, @@ -461,7 +461,7 @@ config = AsyncmyConfig( # OR disable SSL for local development (NOT production!) config = AsyncmyConfig( - pool_config={ + connection_config={ "ssl": None, } ) diff --git a/.claude/skills/sqlspec_adapters/asyncpg.md b/.claude/skills/sqlspec_adapters/asyncpg.md index 204082589..92f030209 100644 --- a/.claude/skills/sqlspec_adapters/asyncpg.md +++ b/.claude/skills/sqlspec_adapters/asyncpg.md @@ -22,7 +22,7 @@ Expert guidance for using SQLSpec's AsyncPG adapter for PostgreSQL. AsyncPG is t from sqlspec.adapters.asyncpg import AsyncpgConfig, AsyncpgDriverFeatures config = AsyncpgConfig( - pool_config={ + connection_config={ "dsn": "postgresql://user:pass@localhost:5432/dbname", # OR individual parameters: "host": "localhost", @@ -78,7 +78,7 @@ result = await session.execute( ```python config = AsyncpgConfig( - pool_config={ + connection_config={ "user": "my-service-account@project.iam", "database": "mydb", }, @@ -95,7 +95,7 @@ config = AsyncpgConfig( ```python config = AsyncpgConfig( - pool_config={ + connection_config={ "user": "my-service-account@project.iam", "database": "mydb", }, @@ -116,7 +116,7 @@ from pgvector.asyncpg import register_vector # Auto-registered if pgvector installed config = AsyncpgConfig( - pool_config={...}, + connection_config={...}, driver_features={"enable_pgvector": True} # Auto-detected ) @@ -161,7 +161,7 @@ results = await session.execute_stack(stack) ```python config = AsyncpgConfig( - pool_config={ + connection_config={ "dsn": "postgresql://localhost/db", "min_size": 10, # Keep 10 connections ready "max_size": 20, # Allow up to 20 total @@ -252,7 +252,7 @@ pg_isready -h localhost -p 5432 Increase pool size or reduce connection lifetime: ```python config = AsyncpgConfig( - pool_config={ + connection_config={ "max_size": 40, # Increase "timeout": 120.0, # Longer timeout } diff --git a/.claude/skills/sqlspec_adapters/duckdb.md b/.claude/skills/sqlspec_adapters/duckdb.md index d655b3056..018da9787 100644 --- a/.claude/skills/sqlspec_adapters/duckdb.md +++ b/.claude/skills/sqlspec_adapters/duckdb.md @@ -32,7 +32,7 @@ from sqlspec.adapters.duckdb import ( ) config = DuckDBConfig( - pool_config={ + connection_config={ # Database path (defaults to ":memory:shared_db") "database": ":memory:shared_db", # Shared in-memory DB # OR: "analytics.duckdb", # Persistent file @@ -201,7 +201,7 @@ session.execute("SELECT * FROM read_csv_auto('s3://bucket/data.csv')") ```python config = DuckDBConfig( - pool_config={ + connection_config={ "allow_community_extensions": True, }, driver_features={ @@ -234,7 +234,7 @@ session.execute(""" ```python config = DuckDBConfig( - pool_config={ + connection_config={ "allow_persistent_secrets": True, "enable_external_access": True, }, @@ -403,7 +403,7 @@ result = session.execute(""" ```python config = DuckDBConfig( - pool_config={ + connection_config={ "threads": 8, # Use 8 threads for query execution } ) @@ -420,7 +420,7 @@ result = session.execute(""" ```python config = DuckDBConfig( - pool_config={ + connection_config={ "enable_object_cache": True, "parquet_metadata_cache": "enabled", } @@ -441,7 +441,7 @@ for i in range(10): ```python # DuckDB uses thread-local connections config = 
DuckDBConfig( - pool_config={ + connection_config={ "database": ":memory:shared_db", # Shared across threads "pool_min_size": 1, "pool_max_size": 4, @@ -481,7 +481,7 @@ with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor: Install extension explicitly: ```python config = DuckDBConfig( - pool_config={ + connection_config={ "autoinstall_known_extensions": True, }, driver_features={ @@ -495,7 +495,7 @@ config = DuckDBConfig( Or enable community extensions: ```python config = DuckDBConfig( - pool_config={ + connection_config={ "allow_community_extensions": True, } ) @@ -509,7 +509,7 @@ import os os.makedirs("data", exist_ok=True) config = DuckDBConfig( - pool_config={"database": "data/analytics.duckdb"} + connection_config={"database": "data/analytics.duckdb"} ) ``` @@ -518,7 +518,7 @@ config = DuckDBConfig( Increase memory limit: ```python config = DuckDBConfig( - pool_config={ + connection_config={ "memory_limit": "4GB", "temp_directory": "/tmp/duckdb", "max_temp_directory_size": "20GB", diff --git a/.claude/skills/sqlspec_adapters/oracledb.md b/.claude/skills/sqlspec_adapters/oracledb.md index 832ec1511..fcc7fd89a 100644 --- a/.claude/skills/sqlspec_adapters/oracledb.md +++ b/.claude/skills/sqlspec_adapters/oracledb.md @@ -28,7 +28,7 @@ This adapter supports dual sync/async patterns, making it suitable for both trad from sqlspec.adapters.oracledb import OracleSyncConfig, OracleDriverFeatures config = OracleSyncConfig( - pool_config={ + connection_config={ # Basic connection: "dsn": "localhost:1521/XEPDB1", # OR individual parameters: @@ -67,7 +67,7 @@ config = OracleSyncConfig( from sqlspec.adapters.oracledb import OracleAsyncConfig, OracleDriverFeatures config = OracleAsyncConfig( - pool_config={ + connection_config={ "dsn": "localhost:1521/XEPDB1", "user": "myuser", "password": "mypass", @@ -90,7 +90,7 @@ async with config.provide_session() as session: ```python config = OracleSyncConfig( - pool_config={ + connection_config={ "user": "ADMIN", "password": "MyCloudPassword123", "dsn": "mydb_high", # TNS alias from tnsnames.ora @@ -105,7 +105,7 @@ config = OracleSyncConfig( ```python config = OracleSyncConfig( - pool_config={ + connection_config={ "host": "localhost", "port": 1521, "sid": "XE", # Use SID instead of service_name @@ -150,7 +150,7 @@ import numpy as np # Auto-enabled if NumPy installed config = OracleSyncConfig( - pool_config={...}, + connection_config={...}, driver_features={"enable_numpy_vectors": True} # Auto-detected ) @@ -186,7 +186,7 @@ Automatic conversion between Python UUIDs and RAW(16) binary format: import uuid config = OracleSyncConfig( - pool_config={...}, + connection_config={...}, driver_features={"enable_uuid_binary": True} # Default: True ) @@ -231,7 +231,7 @@ Oracle defaults unquoted identifiers to uppercase. 
SQLSpec normalizes to lowerca ```python config = OracleSyncConfig( - pool_config={...}, + connection_config={...}, driver_features={"enable_lowercase_column_names": True} # Default: True ) @@ -299,7 +299,7 @@ Oracle's connection pool provides production-grade resource management: ```python config = OracleSyncConfig( - pool_config={ + connection_config={ "dsn": "localhost:1521/XEPDB1", "user": "myuser", "password": "mypass", @@ -334,7 +334,7 @@ def init_session(connection, tag): cursor.close() config = OracleSyncConfig( - pool_config={ + connection_config={ "dsn": "localhost:1521/XEPDB1", "user": "myuser", "password": "mypass", @@ -414,7 +414,7 @@ session.execute_many( ```python # Use full DSN string instead of TNS alias config = OracleSyncConfig( - pool_config={ + connection_config={ "dsn": "(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=localhost)(PORT=1521))(CONNECT_DATA=(SERVICE_NAME=XEPDB1)))" } ) @@ -431,7 +431,7 @@ os.environ["TNS_ADMIN"] = "/path/to/tnsnames_dir" **Solution for Cloud Wallet**: ```python config = OracleSyncConfig( - pool_config={ + connection_config={ "user": "ADMIN", "password": "CloudPassword123", "dsn": "mydb_high", # Must match tnsnames.ora alias @@ -450,7 +450,7 @@ config = OracleSyncConfig( ```python # Increase pool size config = OracleSyncConfig( - pool_config={ + connection_config={ "max": 32, # Increase from 16 "wait_timeout": 5000, # Wait longer (5 seconds) } diff --git a/.claude/skills/sqlspec_adapters/psqlpy.md b/.claude/skills/sqlspec_adapters/psqlpy.md index 8250a6bbc..8a9eb728b 100644 --- a/.claude/skills/sqlspec_adapters/psqlpy.md +++ b/.claude/skills/sqlspec_adapters/psqlpy.md @@ -25,7 +25,7 @@ Built on Rust's tokio async runtime and the native PostgreSQL protocol, Psqlpy d from sqlspec.adapters.psqlpy import PsqlpyConfig, PsqlpyDriverFeatures config = PsqlpyConfig( - pool_config={ + connection_config={ # Connection DSN (recommended): "dsn": "postgresql://user:pass@localhost:5432/dbname", # OR individual parameters: @@ -105,7 +105,7 @@ import numpy as np # Auto-registered if pgvector installed config = PsqlpyConfig( - pool_config={ + connection_config={ "dsn": "postgresql://localhost/vectordb" }, driver_features={"enable_pgvector": True} # Auto-detected @@ -165,7 +165,7 @@ Built-in connection pooling with Rust's tokio runtime: ```python config = PsqlpyConfig( - pool_config={ + connection_config={ "dsn": "postgresql://localhost/db", "max_db_pool_size": 30, # Maximum connections "conn_recycling_method": "fast", # Fast connection recycling @@ -232,14 +232,14 @@ Two connection recycling strategies: ```python # Fast recycling (default) - minimal overhead config = PsqlpyConfig( - pool_config={ + connection_config={ "conn_recycling_method": "fast" } ) # Auto recycling - more thorough cleanup config = PsqlpyConfig( - pool_config={ + connection_config={ "conn_recycling_method": "auto" } ) @@ -251,7 +251,7 @@ Built-in load balancing for PostgreSQL replicas: ```python config = PsqlpyConfig( - pool_config={ + connection_config={ "hosts": ["primary.db.local", "replica1.db.local", "replica2.db.local"], "ports": [5432, 5432, 5432], "load_balance_hosts": "random", # Random selection @@ -351,7 +351,7 @@ psql "postgresql://user@localhost/db" -c "SELECT 1" Verify connection parameters: ```python config = PsqlpyConfig( - pool_config={ + connection_config={ "dsn": "postgresql://user:pass@localhost:5432/dbname", "connect_timeout_sec": 30, # Increase timeout } @@ -363,7 +363,7 @@ config = PsqlpyConfig( Increase pool size: ```python config = PsqlpyConfig( - pool_config={ + 
connection_config={ "max_db_pool_size": 50, # Increase from default } ) @@ -397,7 +397,7 @@ await session.execute("SELECT * FROM users WHERE id = :id", {"id": user_id}) Increase connection and TCP timeouts: ```python config = PsqlpyConfig( - pool_config={ + connection_config={ "connect_timeout_sec": 30, "tcp_user_timeout_sec": 60, "keepalives": True, diff --git a/.claude/skills/sqlspec_adapters/psycopg.md b/.claude/skills/sqlspec_adapters/psycopg.md index 338f47130..310e9bb37 100644 --- a/.claude/skills/sqlspec_adapters/psycopg.md +++ b/.claude/skills/sqlspec_adapters/psycopg.md @@ -27,7 +27,7 @@ Psycopg 3 combines battle-tested reliability with modern features like connectio from sqlspec.adapters.psycopg import PsycopgAsyncConfig, PsycopgDriverFeatures config = PsycopgAsyncConfig( - pool_config={ + connection_config={ # Connection string (recommended): "conninfo": "postgresql://user:pass@localhost:5432/dbname", # OR individual parameters: @@ -75,7 +75,7 @@ config = PsycopgAsyncConfig( from sqlspec.adapters.psycopg import PsycopgSyncConfig, PsycopgDriverFeatures config = PsycopgSyncConfig( - pool_config={ + connection_config={ "conninfo": "postgresql://user:pass@localhost:5432/dbname", "min_size": 4, "max_size": 20, @@ -129,7 +129,7 @@ from sqlspec.adapters.psycopg import PsycopgAsyncConfig # Auto-registered if pgvector installed config = PsycopgAsyncConfig( - pool_config={ + connection_config={ "conninfo": "postgresql://localhost/vectordb" }, driver_features={"enable_pgvector": True} # Auto-detected @@ -183,7 +183,7 @@ Production-grade connection pooling with extensive configuration: ```python config = PsycopgAsyncConfig( - pool_config={ + connection_config={ "conninfo": "postgresql://localhost/db", "min_size": 10, # Keep 10 connections ready "max_size": 40, # Allow up to 40 total @@ -335,7 +335,7 @@ listen_addresses = '*' # or specific IP Increase pool size or reduce connection lifetime: ```python config = PsycopgAsyncConfig( - pool_config={ + connection_config={ "max_size": 50, # Increase from default "timeout": 120.0, # Longer acquisition timeout "max_waiting": 100, # Allow more queued requests @@ -362,7 +362,7 @@ If error persists, check logs for DEBUG message about graceful degradation. Configure SSL in connection string: ```python config = PsycopgAsyncConfig( - pool_config={ + connection_config={ "conninfo": "postgresql://user@host/db?sslmode=require", # OR: "sslmode": "require", diff --git a/.claude/skills/sqlspec_adapters/spanner.md b/.claude/skills/sqlspec_adapters/spanner.md index 8029d7929..7fa4e8643 100644 --- a/.claude/skills/sqlspec_adapters/spanner.md +++ b/.claude/skills/sqlspec_adapters/spanner.md @@ -22,7 +22,7 @@ Expert guidance for using SQLSpec's Spanner adapter for Google Cloud Spanner. 
Sp from sqlspec.adapters.spanner import SpannerSyncConfig config = SpannerSyncConfig( - pool_config={ + connection_config={ "project": "my-gcp-project", "instance_id": "my-instance", "database_id": "my-database", @@ -44,7 +44,7 @@ config = SpannerSyncConfig( from google.auth.credentials import AnonymousCredentials config = SpannerSyncConfig( - pool_config={ + connection_config={ "project": "test-project", "instance_id": "test-instance", "database_id": "test-database", @@ -156,7 +156,7 @@ from sqlspec.adapters.spanner import SpannerSyncConfig from sqlspec.adapters.spanner.litestar import SpannerSyncStore config = SpannerSyncConfig( - pool_config={ + connection_config={ "project": "my-project", "instance_id": "my-instance", "database_id": "my-database", @@ -191,7 +191,7 @@ from sqlspec.adapters.spanner import SpannerSyncConfig from sqlspec.adapters.spanner.adk import SpannerADKStore config = SpannerSyncConfig( - pool_config={ + connection_config={ "project": "my-project", "instance_id": "my-instance", "database_id": "my-database", diff --git a/.claude/skills/sqlspec_adapters/sqlite.md b/.claude/skills/sqlspec_adapters/sqlite.md index 89fa783a2..5689518ff 100644 --- a/.claude/skills/sqlspec_adapters/sqlite.md +++ b/.claude/skills/sqlspec_adapters/sqlite.md @@ -27,7 +27,7 @@ SQLite provides ACID transactions, full SQL support, and thread-local connection from sqlspec.adapters.sqlite import SqliteConfig, SqliteDriverFeatures config = SqliteConfig( - pool_config={ + connection_config={ # Database path "database": "app.db", # File-based database # OR: "file:memory_{uuid}?mode=memory&cache=private", # Default @@ -83,7 +83,7 @@ result = session.execute( ```python # SQLite uses thread-local connections for safety config = SqliteConfig( - pool_config={ + connection_config={ "database": "app.db", "check_same_thread": False, # Pool handles thread safety } @@ -204,7 +204,7 @@ assert isinstance(result["timestamp"], datetime) ```python # Shared memory database (multiple connections see same data) config = SqliteConfig( - pool_config={ + connection_config={ "database": "file:memdb1?mode=memory&cache=shared", "uri": True, } @@ -212,7 +212,7 @@ config = SqliteConfig( # Private memory database (isolated) config = SqliteConfig( - pool_config={ + connection_config={ "database": "file:memdb2?mode=memory&cache=private", "uri": True, } @@ -223,7 +223,7 @@ config = SqliteConfig( ```python config = SqliteConfig( - pool_config={ + connection_config={ "database": "file:app.db?mode=ro", "uri": True, } @@ -277,7 +277,7 @@ storage.import_arrow("users", data) ```python config = SqliteConfig( - pool_config={ + connection_config={ "cached_statements": 256, # Cache 256 prepared statements } ) @@ -304,7 +304,7 @@ except Exception: # Context manager (autocommit disabled) config = SqliteConfig( - pool_config={ + connection_config={ "isolation_level": "DEFERRED", # Enable transaction mode } ) @@ -348,7 +348,7 @@ result = session.execute( Increase timeout or use WAL mode: ```python config = SqliteConfig( - pool_config={ + connection_config={ "timeout": 30.0, # Wait up to 30s for locks } ) @@ -383,7 +383,7 @@ config = SqliteConfig( Explicitly enable URI mode: ```python config = SqliteConfig( - pool_config={ + connection_config={ "database": "file:app.db?mode=ro", "uri": True, # Required } diff --git a/.claude/skills/sqlspec_usage/examples/fastapi_integration.py b/.claude/skills/sqlspec_usage/examples/fastapi_integration.py index bc5fa5cd9..de17db7ec 100644 --- a/.claude/skills/sqlspec_usage/examples/fastapi_integration.py +++ 
b/.claude/skills/sqlspec_usage/examples/fastapi_integration.py @@ -40,7 +40,7 @@ class User(BaseModel): spec = SQLSpec() db = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/myapp", "min_size": 5, "max_size": 10}, + connection_config={"dsn": "postgresql://localhost/myapp", "min_size": 5, "max_size": 10}, extension_config={ "starlette": { # FastAPI uses starlette config key "commit_mode": "autocommit", diff --git a/.claude/skills/sqlspec_usage/examples/litestar_integration.py b/.claude/skills/sqlspec_usage/examples/litestar_integration.py index 3962328fa..3fd35a368 100644 --- a/.claude/skills/sqlspec_usage/examples/litestar_integration.py +++ b/.claude/skills/sqlspec_usage/examples/litestar_integration.py @@ -51,7 +51,7 @@ class AnalyticsEvent(BaseModel): # Primary database (PostgreSQL) primary_db = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/myapp", "min_size": 10, "max_size": 20}, + connection_config={"dsn": "postgresql://localhost/myapp", "min_size": 10, "max_size": 20}, extension_config={ "litestar": { "session_key": "primary_db", # Custom key @@ -69,7 +69,7 @@ class AnalyticsEvent(BaseModel): # Analytics database (DuckDB) analytics_db = spec.add_config( DuckDBConfig( - pool_config={"database": "analytics.duckdb", "config": {"memory_limit": "4GB"}}, + connection_config={"database": "analytics.duckdb", "config": {"memory_limit": "4GB"}}, extension_config={ "litestar": { "session_key": "analytics_db", # Unique key diff --git a/.claude/skills/sqlspec_usage/examples/multi_database.py b/.claude/skills/sqlspec_usage/examples/multi_database.py index d4de9d4b7..3fe20f435 100644 --- a/.claude/skills/sqlspec_usage/examples/multi_database.py +++ b/.claude/skills/sqlspec_usage/examples/multi_database.py @@ -39,18 +39,21 @@ class Order(BaseModel): # Production database (PostgreSQL) production_db = spec.add_config( - AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/production", "min_size": 20, "max_size": 50}) + AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/production", "min_size": 20, "max_size": 50}) ) # Cache database (SQLite) cache_db = spec.add_config( - SqliteConfig(pool_config={"database": "/var/lib/myapp/cache.db", "check_same_thread": False}) + SqliteConfig(connection_config={"database": "/var/lib/myapp/cache.db", "check_same_thread": False}) ) # Analytics database (DuckDB) analytics_db = spec.add_config( DuckDBConfig( - pool_config={"database": "/var/lib/myapp/analytics.duckdb", "config": {"memory_limit": "8GB", "threads": 4}} + connection_config={ + "database": "/var/lib/myapp/analytics.duckdb", + "config": {"memory_limit": "8GB", "threads": 4}, + } ) ) diff --git a/.claude/skills/sqlspec_usage/examples/testing_patterns.py b/.claude/skills/sqlspec_usage/examples/testing_patterns.py index b73266ba6..23a344d9a 100644 --- a/.claude/skills/sqlspec_usage/examples/testing_patterns.py +++ b/.claude/skills/sqlspec_usage/examples/testing_patterns.py @@ -30,7 +30,7 @@ def asyncpg_config() -> AsyncpgConfig: # pytest-databases automatically provides postgres_service fixture # This is managed by pytest-databases - return AsyncpgConfig(pool_config={"dsn": "postgresql://postgres:password@localhost:5432/test"}) + return AsyncpgConfig(connection_config={"dsn": "postgresql://postgres:password@localhost:5432/test"}) @pytest.fixture(scope="session") @@ -69,7 +69,7 @@ async def asyncpg_session(asyncpg_db: type[AsyncpgConfig]) -> AsyncGenerator[Asy def sqlite_config() -> Generator[type[SqliteConfig], None, None]: with 
tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: # IMPORTANT: Use temp file for isolation in parallel tests - config = SqliteConfig(pool_config={"database": tmp.name}) + config = SqliteConfig(connection_config={"database": tmp.name}) # Apply schema spec = SQLSpec() @@ -174,7 +174,7 @@ def test_sqlite_isolation() -> None: """Test SQLite test isolation with temp files.""" # Each test gets own temp file with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: - config = AiosqliteConfig(pool_config={"database": tmp.name}) + config = AiosqliteConfig(connection_config={"database": tmp.name}) spec = SQLSpec() db = spec.add_config(config) @@ -194,7 +194,7 @@ def test_sqlite_isolation() -> None: @pytest.mark.parametrize( ("adapter_name", "config"), [ - ("asyncpg", AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/test"})) + ("asyncpg", AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test"})) # Add more adapters as needed ], ) diff --git a/.claude/skills/sqlspec_usage/patterns/configuration.md b/.claude/skills/sqlspec_usage/patterns/configuration.md index 3f1a6e7b6..9a2ed0edc 100644 --- a/.claude/skills/sqlspec_usage/patterns/configuration.md +++ b/.claude/skills/sqlspec_usage/patterns/configuration.md @@ -13,7 +13,7 @@ from sqlspec.adapters.{adapter} import {Adapter}Config spec = SQLSpec() db = spec.add_config( {Adapter}Config( - pool_config={...}, # Tier 1: Connection parameters + connection_config={...}, # Tier 1: Connection parameters statement_config={...}, # Tier 2: SQL processing (optional) extension_config={...}, # Tier 3: Framework integration (optional) driver_features={...}, # Tier 4: Adapter-specific features (optional) @@ -22,7 +22,7 @@ db = spec.add_config( ) ``` -## Tier 1: pool_config (Connection Parameters) +## Tier 1: connection_config (Connection Parameters) Adapter-specific connection settings. Each adapter has different parameters. @@ -33,7 +33,7 @@ Adapter-specific connection settings. Each adapter has different parameters. 
from sqlspec.adapters.asyncpg import AsyncpgConfig config = AsyncpgConfig( - pool_config={ + connection_config={ "dsn": "postgresql://user:pass@localhost:5432/dbname", # OR individual parameters: "host": "localhost", @@ -57,7 +57,7 @@ from sqlspec.adapters.psycopg import PsycopgConfig, PsycopgAsyncConfig # Sync config = PsycopgConfig( - pool_config={ + connection_config={ "conninfo": "postgresql://localhost/db", "min_size": 4, "max_size": 10, @@ -66,7 +66,7 @@ config = PsycopgConfig( # Async config = PsycopgAsyncConfig( - pool_config={ + connection_config={ "conninfo": "postgresql://localhost/db", "min_size": 4, "max_size": 10, @@ -79,7 +79,7 @@ config = PsycopgAsyncConfig( from sqlspec.adapters.psqlpy import PsqlpyConfig config = PsqlpyConfig( - pool_config={ + connection_config={ "dsn": "postgresql://localhost/db", "max_db_pool_size": 20, } @@ -93,7 +93,7 @@ config = PsqlpyConfig( from sqlspec.adapters.sqlite import SqliteConfig config = SqliteConfig( - pool_config={ + connection_config={ "database": "/path/to/database.db", # or ":memory:" "timeout": 5.0, "check_same_thread": False, # For multi-threaded use @@ -106,7 +106,7 @@ config = SqliteConfig( from sqlspec.adapters.aiosqlite import AiosqliteConfig config = AiosqliteConfig( - pool_config={ + connection_config={ "database": "/path/to/database.db", "timeout": 5.0, } @@ -119,7 +119,7 @@ config = AiosqliteConfig( from sqlspec.adapters.duckdb import DuckDBConfig config = DuckDBConfig( - pool_config={ + connection_config={ "database": "/path/to/database.duckdb", # or ":memory:" "read_only": False, "config": { @@ -137,7 +137,7 @@ from sqlspec.adapters.oracledb import OracleConfig, OracleAsyncConfig # Sync config = OracleConfig( - pool_config={ + connection_config={ "user": "myuser", "password": "mypass", "dsn": "localhost:1521/ORCLPDB1", @@ -149,7 +149,7 @@ config = OracleConfig( # Async config = OracleAsyncConfig( - pool_config={ + connection_config={ "user": "myuser", "password": "mypass", "dsn": "localhost:1521/ORCLPDB1", @@ -166,7 +166,7 @@ config = OracleAsyncConfig( from sqlspec.adapters.asyncmy import AsyncmyConfig config = AsyncmyConfig( - pool_config={ + connection_config={ "host": "localhost", "port": 3306, "user": "myuser", @@ -184,7 +184,7 @@ config = AsyncmyConfig( from sqlspec.adapters.bigquery import BigQueryConfig config = BigQueryConfig( - pool_config={ + connection_config={ "project": "my-gcp-project", "dataset": "my_dataset", "credentials": "/path/to/service-account.json", # or None for ADC @@ -198,7 +198,7 @@ config = BigQueryConfig( from sqlspec.adapters.adbc import ADBCConfig config = ADBCConfig( - pool_config={ + connection_config={ "driver": "adbc_driver_postgresql", "uri": "postgresql://localhost/db", # Driver-specific options @@ -218,7 +218,7 @@ Controls SQL statement parsing, validation, and transformation: from sqlspec.core import StatementConfig config = AsyncpgConfig( - pool_config={...}, + connection_config={...}, statement_config=StatementConfig( enable_validation=True, # Validate SQL syntax enable_transformations=True, # Apply SQL transformations @@ -241,7 +241,7 @@ Framework-specific settings keyed by framework name. 
```python config = AsyncpgConfig( - pool_config={...}, + connection_config={...}, extension_config={ "litestar": { "connection_key": "postgres_connection", @@ -261,7 +261,7 @@ config = AsyncpgConfig( ```python config = AsyncpgConfig( - pool_config={...}, + connection_config={...}, extension_config={ "starlette": { # Same key for FastAPI "connection_key": "postgres_connection", @@ -280,7 +280,7 @@ config = AsyncpgConfig( ```python config = SqliteConfig( - pool_config={...}, + connection_config={...}, extension_config={ "flask": { "connection_key": "db_connection", @@ -307,7 +307,7 @@ Optional features that require additional dependencies or control adapter behavi from sqlspec.adapters.asyncpg import AsyncpgConfig, AsyncpgDriverFeatures config = AsyncpgConfig( - pool_config={...}, + connection_config={...}, driver_features=AsyncpgDriverFeatures( enable_pgvector=True, # Auto-detected if pgvector installed enable_json_codecs=True, # Register JSON codecs @@ -330,7 +330,7 @@ config = AsyncpgConfig( from sqlspec.adapters.psycopg import PsycopgAsyncConfig, PsycopgDriverFeatures config = PsycopgAsyncConfig( - pool_config={...}, + connection_config={...}, driver_features=PsycopgDriverFeatures( enable_pgvector=True, enable_json_codecs=True, @@ -345,7 +345,7 @@ config = PsycopgAsyncConfig( from sqlspec.adapters.duckdb import DuckDBConfig, DuckDBDriverFeatures config = DuckDBConfig( - pool_config={...}, + connection_config={...}, driver_features=DuckDBDriverFeatures( enable_uuid_conversion=True, # Convert UUID strings to UUID objects json_serializer=orjson.dumps, # Use orjson for performance @@ -363,7 +363,7 @@ config = DuckDBConfig( from sqlspec.adapters.oracledb import OracleAsyncConfig, OracleDriverFeatures config = OracleAsyncConfig( - pool_config={...}, + connection_config={...}, driver_features=OracleDriverFeatures( enable_numpy_vectors=True, # NumPy array ↔ Oracle VECTOR conversion enable_uuid_binary=True, # UUID ↔ RAW(16) conversion @@ -377,7 +377,7 @@ config = OracleAsyncConfig( from sqlspec.adapters.aiosqlite import AiosqliteConfig, AiosqliteDriverFeatures config = AiosqliteConfig( - pool_config={...}, + connection_config={...}, driver_features=AiosqliteDriverFeatures( enable_json_detection=True, # Detect and parse JSON strings json_serializer=json.dumps, @@ -399,7 +399,7 @@ spec = SQLSpec() # Primary PostgreSQL database primary = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/main"}, + connection_config={"dsn": "postgresql://localhost/main"}, extension_config={ "litestar": {"session_key": "primary_db"} } @@ -409,7 +409,7 @@ primary = spec.add_config( # Analytics DuckDB database analytics = spec.add_config( DuckDBConfig( - pool_config={"database": "analytics.duckdb"}, + connection_config={"database": "analytics.duckdb"}, extension_config={ "litestar": {"session_key": "analytics_db"} } @@ -435,7 +435,7 @@ Configure migration behavior: ```python config = AsyncpgConfig( - pool_config={...}, + connection_config={...}, migration_config={ "script_location": "migrations", # Directory for migration files "version_table": "sqlspec_version", # Version tracking table @@ -483,12 +483,12 @@ Most adapters auto-detect optional features: ```python # pgvector auto-enabled if package installed -config = AsyncpgConfig(pool_config={...}) +config = AsyncpgConfig(connection_config={...}) # driver_features["enable_pgvector"] auto-set based on import # Explicit override config = AsyncpgConfig( - pool_config={...}, + connection_config={...}, driver_features={"enable_pgvector": False} # 
Force disable ) ``` @@ -511,7 +511,7 @@ cache = spec.add_config(SqliteConfig( import os config = AsyncpgConfig( - pool_config={ + connection_config={ "dsn": os.getenv("DATABASE_URL"), "min_size": int(os.getenv("DB_POOL_MIN", "10")), "max_size": int(os.getenv("DB_POOL_MAX", "20")), @@ -524,19 +524,19 @@ config = AsyncpgConfig( ## Common Configuration Errors -### Error: "Invalid pool_config parameter" +### Error: "Invalid connection_config parameter" **Cause:** Using wrong parameter name for adapter ```python # WRONG - using asyncpg params for psycopg config = PsycopgConfig( - pool_config={"dsn": "...", "min_size": 10} + connection_config={"dsn": "...", "min_size": 10} ) # CORRECT config = PsycopgConfig( - pool_config={"conninfo": "...", "min_size": 10} + connection_config={"conninfo": "...", "min_size": 10} ) ``` diff --git a/.claude/skills/sqlspec_usage/patterns/frameworks.md b/.claude/skills/sqlspec_usage/patterns/frameworks.md index 61fdaca39..46576b86b 100644 --- a/.claude/skills/sqlspec_usage/patterns/frameworks.md +++ b/.claude/skills/sqlspec_usage/patterns/frameworks.md @@ -13,7 +13,7 @@ from sqlspec.extensions.litestar import SQLSpecPlugin spec = SQLSpec() db = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/db"}, + connection_config={"dsn": "postgresql://localhost/db"}, extension_config={ "litestar": { "commit_mode": "autocommit", @@ -46,7 +46,7 @@ from sqlspec.adapters.asyncpg import AsyncpgConfig, AsyncpgDriver from sqlspec.extensions.fastapi import SQLSpecPlugin spec = SQLSpec() -db = spec.add_config(AsyncpgConfig(pool_config={...})) +db = spec.add_config(AsyncpgConfig(connection_config={...})) plugin = SQLSpecPlugin(spec) app = FastAPI() @@ -68,7 +68,7 @@ from starlette.routing import Route from sqlspec.extensions.starlette import SQLSpecPlugin spec = SQLSpec() -db = spec.add_config(AsyncpgConfig(pool_config={...})) +db = spec.add_config(AsyncpgConfig(connection_config={...})) plugin = SQLSpecPlugin(spec) async def homepage(request): @@ -90,7 +90,7 @@ from sqlspec.extensions.flask import SQLSpecPlugin app = Flask(__name__) spec = SQLSpec() -db = spec.add_config(SqliteConfig(pool_config={"database": "app.db"})) +db = spec.add_config(SqliteConfig(connection_config={"database": "app.db"})) plugin = SQLSpecPlugin(spec) plugin.init_app(app) @@ -111,12 +111,12 @@ def get_user(user_id): ```python # Configure multiple databases primary = spec.add_config(AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/main"}, + connection_config={"dsn": "postgresql://localhost/main"}, extension_config={"litestar": {"session_key": "primary_db"}} )) cache = spec.add_config(SqliteConfig( - pool_config={"database": "cache.db"}, + connection_config={"database": "cache.db"}, extension_config={"litestar": {"session_key": "cache_db"}} )) @@ -133,7 +133,7 @@ For custom DI solutions (Dishka, dependency-injector): ```python config = AsyncpgConfig( - pool_config={...}, + connection_config={...}, extension_config={ "litestar": {"disable_di": True} } diff --git a/.claude/skills/sqlspec_usage/patterns/migrations.md b/.claude/skills/sqlspec_usage/patterns/migrations.md index 0953ef0db..5b7d450d4 100644 --- a/.claude/skills/sqlspec_usage/patterns/migrations.md +++ b/.claude/skills/sqlspec_usage/patterns/migrations.md @@ -117,7 +117,7 @@ def downgrade(driver): ```python config = AsyncpgConfig( - pool_config={...}, + connection_config={...}, migration_config={ "script_location": "migrations", "version_table": "sqlspec_version", diff --git 
a/.claude/skills/sqlspec_usage/patterns/performance.md b/.claude/skills/sqlspec_usage/patterns/performance.md index 4d727437f..fe36443fd 100644 --- a/.claude/skills/sqlspec_usage/patterns/performance.md +++ b/.claude/skills/sqlspec_usage/patterns/performance.md @@ -8,7 +8,7 @@ Guide to optimizing SQLSpec performance. ```python config = AsyncpgConfig( - pool_config={ + connection_config={ "dsn": "postgresql://localhost/db", "min_size": 10, # Keep 10 connections ready "max_size": 20, # Allow up to 20 total @@ -181,10 +181,10 @@ count = result.scalar() ```python # ✅ GOOD - Async for web apps -config = AsyncpgConfig(pool_config={...}) +config = AsyncpgConfig(connection_config={...}) # ❌ LESS EFFICIENT - Sync blocks threads -config = PsycopgConfig(pool_config={...}) +config = PsycopgConfig(connection_config={...}) ``` **Async benefits:** @@ -241,7 +241,7 @@ print(f"Query took {duration:.3f}s") # Use observability middleware config = AsyncpgConfig( - pool_config={...}, + connection_config={...}, extension_config={ "litestar": { "enable_correlation_middleware": True # Request tracking diff --git a/.claude/skills/sqlspec_usage/patterns/testing.md b/.claude/skills/sqlspec_usage/patterns/testing.md index 56ca0f5a8..a593c8ea9 100644 --- a/.claude/skills/sqlspec_usage/patterns/testing.md +++ b/.claude/skills/sqlspec_usage/patterns/testing.md @@ -7,7 +7,7 @@ Best practices for testing with SQLSpec. **❌ WRONG - :memory: with pooling causes test failures:** ```python def test_something(): - config = AiosqliteConfig(pool_config={"database": ":memory:"}) + config = AiosqliteConfig(connection_config={"database": ":memory:"}) # Shared state across parallel tests! ``` @@ -17,7 +17,7 @@ import tempfile def test_something(): with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: - config = AiosqliteConfig(pool_config={"database": tmp.name}) + config = AiosqliteConfig(connection_config={"database": tmp.name}) # Each test gets isolated database ``` @@ -32,7 +32,7 @@ from sqlspec.adapters.asyncpg import AsyncpgConfig @pytest.fixture(scope="session") def asyncpg_config(postgres_service: PostgresService): return AsyncpgConfig( - pool_config={"dsn": postgres_service.connection_url()} + connection_config={"dsn": postgres_service.connection_url()} ) @pytest.fixture(scope="session") @@ -96,7 +96,7 @@ def test_starlette_autocommit(): with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, + connection_config={"database": tmp.name}, extension_config={"starlette": {"commit_mode": "autocommit"}} ) sql.add_config(config) diff --git a/.claude/skills/sqlspec_usage/patterns/troubleshooting.md b/.claude/skills/sqlspec_usage/patterns/troubleshooting.md index 543355530..6253cd598 100644 --- a/.claude/skills/sqlspec_usage/patterns/troubleshooting.md +++ b/.claude/skills/sqlspec_usage/patterns/troubleshooting.md @@ -41,7 +41,7 @@ from sqlspec.adapters.asyncpg import AsyncpgConfig ### "TypeError: __init__() got an unexpected keyword argument 'dsn'" -**Cause:** Passing connection parameters directly instead of in `pool_config` +**Cause:** Passing connection parameters directly instead of in `connection_config` **Solution:** ```python @@ -49,7 +49,7 @@ from sqlspec.adapters.asyncpg import AsyncpgConfig config = AsyncpgConfig(dsn="postgresql://localhost/db") # ✅ CORRECT -config = AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/db"}) +config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/db"}) ``` ### 
"ImproperConfigurationError: Duplicate state keys found" @@ -116,7 +116,7 @@ await config.create_pool() ```python # Increase pool size config = AsyncpgConfig( - pool_config={ + connection_config={ "dsn": "...", "max_size": 40, # Increase from default 20 "timeout": 120.0, # Increase timeout @@ -267,13 +267,13 @@ await session.commit() # Must commit! **Solution:** ```python # ❌ WRONG - Shared state across tests -config = AiosqliteConfig(pool_config={"database": ":memory:"}) +config = AiosqliteConfig(connection_config={"database": ":memory:"}) # ✅ CORRECT - Isolated temp files import tempfile with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: - config = AiosqliteConfig(pool_config={"database": tmp.name}) + config = AiosqliteConfig(connection_config={"database": tmp.name}) ``` ### "Fixture not found: postgres_service" @@ -391,7 +391,7 @@ async def get_users(db_session: AsyncpgDriver): # Must match session_key ```python # 1. Check if pooling enabled config = AsyncpgConfig( - pool_config={ + connection_config={ "min_size": 10, # Should be > 0 for pooling "max_size": 20, } @@ -518,7 +518,7 @@ result = await session.execute("SELECT * FROM users WHERE id = ?", user_id) ```python # Get pool statistics -pool = config.pool_instance +pool = config.connection_instance print(f"Pool size: {pool.get_size()}") print(f"Free connections: {pool.get_idle_size()}") diff --git a/.claude/skills/sqlspec_usage/skill.md b/.claude/skills/sqlspec_usage/skill.md index f82010bf6..6b8b88b60 100644 --- a/.claude/skills/sqlspec_usage/skill.md +++ b/.claude/skills/sqlspec_usage/skill.md @@ -46,7 +46,7 @@ from sqlspec.adapters.{adapter} import {Adapter}Config spec = SQLSpec() db = spec.add_config( {Adapter}Config( - pool_config={...}, # Connection parameters + connection_config={...}, # Connection parameters statement_config={...}, # SQL processing (optional) extension_config={...}, # Framework integration (optional) driver_features={...}, # Adapter-specific features (optional) @@ -57,7 +57,7 @@ db = spec.add_config( **Key Principles:** 1. Always store the config key returned from `add_config()` -2. Use `pool_config` dict for connection parameters (adapter-specific) +2. Use `connection_config` dict for connection parameters (adapter-specific) 3. Define `driver_features` using TypedDict for type safety 4. Auto-detect optional features when dependencies are available 5. 
Use unique `session_key` values for multi-database setups @@ -131,7 +131,7 @@ from sqlspec.extensions.litestar import SQLSpecPlugin spec = SQLSpec() db = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/db"}, + connection_config={"dsn": "postgresql://localhost/db"}, extension_config={ "litestar": { "commit_mode": "autocommit", # or "manual", "autocommit_include_redirect" @@ -180,7 +180,7 @@ from sqlspec.extensions.flask import SQLSpecPlugin app = Flask(__name__) spec = SQLSpec() -db = spec.add_config(SqliteConfig(pool_config={"database": "app.db"})) +db = spec.add_config(SqliteConfig(connection_config={"database": "app.db"})) plugin = SQLSpecPlugin(spec) plugin.init_app(app) @@ -278,7 +278,7 @@ def test_with_pooling(): """Use temp files, NOT :memory: with pooling!""" with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: config = AiosqliteConfig( - pool_config={"database": tmp.name} # Isolated per test + connection_config={"database": tmp.name} # Isolated per test ) # Test logic here ``` @@ -290,7 +290,7 @@ def test_with_pooling(): @pytest.fixture(scope="session") def asyncpg_config(postgres_service: PostgresService): return AsyncpgConfig( - pool_config={"dsn": postgres_service.connection_url()} + connection_config={"dsn": postgres_service.connection_url()} ) @pytest.fixture(scope="session") @@ -315,7 +315,7 @@ uv run pytest -n auto --dist=loadgroup **Connection Pooling:** ```python config = AsyncpgConfig( - pool_config={ + connection_config={ "dsn": "postgresql://localhost/db", "min_size": 10, "max_size": 20, @@ -368,14 +368,14 @@ spec.add_config(AsyncpgConfig(...)) # Lost reference! db = spec.add_config(AsyncpgConfig(...)) ``` -❌ **Missing pool_config:** +❌ **Missing connection_config:** ```python config = AsyncpgConfig(dsn="postgresql://...") # Wrong! ``` -✅ **Use pool_config dict:** +✅ **Use connection_config dict:** ```python -config = AsyncpgConfig(pool_config={"dsn": "postgresql://..."}) +config = AsyncpgConfig(connection_config={"dsn": "postgresql://..."}) ``` ### Session Management Anti-Patterns @@ -457,14 +457,14 @@ spec.add_config(DuckDBConfig( ❌ **Using :memory: with pooling:** ```python -config = AiosqliteConfig(pool_config={"database": ":memory:"}) +config = AiosqliteConfig(connection_config={"database": ":memory:"}) # Shared state in parallel tests! 
``` ✅ **Use temp files:** ```python with tempfile.NamedTemporaryFile(suffix=".db") as tmp: - config = AiosqliteConfig(pool_config={"database": tmp.name}) + config = AiosqliteConfig(connection_config={"database": tmp.name}) ``` ## Troubleshooting Guide diff --git a/.gemini/bootstrap.md b/.gemini/bootstrap.md index 069d0c91a..355ec0efe 100644 --- a/.gemini/bootstrap.md +++ b/.gemini/bootstrap.md @@ -1516,7 +1516,7 @@ class {Adapter}Config(AsyncDatabaseConfig): ```python async def _create_pool(self) -> Pool: \"\"\"Create connection pool.\"\"\" - config = dict(self.pool_config) + config = dict(self.connection_config) # Pattern: session callback for initialization if self.driver_features.get("enable_feature", False): config["session_callback"] = self._init_connection @@ -1669,7 +1669,7 @@ make fix # Auto-fix formatting issues **Solution**: ```python async def _create_pool(self): - config = dict(self.pool_config) + config = dict(self.connection_config) if self.driver_features.get("enable_feature", False): config["session_callback"] = self._init_connection return await create_pool(**config) @@ -2256,7 +2256,7 @@ async def test_{feature}_basic_usage(asyncpg_dsn): # Similar to: test_similar1.py:30-55 sql = SQLSpec() - config = AsyncpgConfig(pool_config={"dsn": asyncpg_dsn}) + config = AsyncpgConfig(connection_config={"dsn": asyncpg_dsn}) sql.add_config(config) async with sql.provide_session(config) as session: @@ -2273,7 +2273,7 @@ async def test_{feature}_edge_case(asyncpg_dsn): # Similar to: test_similar1.py:70-90 sql = SQLSpec() - config = AsyncpgConfig(pool_config={"dsn": asyncpg_dsn}) + config = AsyncpgConfig(connection_config={"dsn": asyncpg_dsn}) sql.add_config(config) async with sql.provide_session(config) as session: @@ -2797,7 +2797,7 @@ Review implementation for these pattern types: ### Configuration Patterns - [ ] driver_features additions -- [ ] pool_config patterns +- [ ] connection_config patterns - [ ] extension_config patterns ### Testing Patterns @@ -2887,7 +2887,7 @@ Example: from sqlspec.adapters.{adapter} import {Adapter}Config config = {Adapter}Config( - pool_config={"dsn": "..."}, + connection_config={"dsn": "..."}, driver_features={ "enable_{feature}": True, # Auto-enabled when {condition} } diff --git a/.gemini/commands/sync-guides.toml b/.gemini/commands/sync-guides.toml index 571a61530..54327c0fc 100644 --- a/.gemini/commands/sync-guides.toml +++ b/.gemini/commands/sync-guides.toml @@ -48,7 +48,7 @@ Glob(pattern="**/*.py", path="sqlspec/") For each adapter, identify: - Public methods (no leading underscore) -- Configuration options (driver_features, pool_config) +- Configuration options (driver_features, connection_config) - Type annotations (what's the actual signature?) - Dependencies (required vs optional packages) @@ -117,7 +117,7 @@ Grep(pattern=r'def execute_query', path="sqlspec/", output_mode="files_with_matc # Code reality check: Read("sqlspec/adapters/asyncpg/config.py") -# Find actual pool_config fields +# Find actual connection_config fields # If pool_size doesn't exist → REWRITE guide with correct options ``` diff --git a/.gemini/commands/test.toml b/.gemini/commands/test.toml index 1d9ff52e8..bb907f21c 100644 --- a/.gemini/commands/test.toml +++ b/.gemini/commands/test.toml @@ -75,7 +75,7 @@ def temp_db(): def test_pooled_connection_isolation(temp_db): # Test connection pooling with isolated database. 
- config = AiosqliteConfig(pool_config={"database": temp_db}) + config = AiosqliteConfig(connection_config={"database": temp_db}) # Test implementation ``` @@ -124,7 +124,7 @@ async def test_oracle_arrow_conversion(oracle_session): ❌ **BAD - Shared Memory Database**: ```python def test_with_memory_db(): - config = AiosqliteConfig(pool_config={"database": ":memory:"}) + config = AiosqliteConfig(connection_config={"database": ":memory:"}) # Fails with pytest -n 2 - tables persist across tests! ``` @@ -135,7 +135,7 @@ import tempfile def test_with_isolated_db(): # Test with isolated temporary database file. with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: - config = AiosqliteConfig(pool_config={"database": tmp.name}) + config = AiosqliteConfig(connection_config={"database": tmp.name}) # Each test gets its own database - no conflicts! ``` @@ -365,14 +365,14 @@ def test_arrow_conversion_large_dataset(): ... ❌ **BAD - Memory DB with Pooling**: ```python -config = AiosqliteConfig(pool_config={"database": ":memory:"}) +config = AiosqliteConfig(connection_config={"database": ":memory:"}) # Breaks with parallel tests! ``` ✅ **GOOD - Temporary File with Pooling**: ```python with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: - config = AiosqliteConfig(pool_config={"database": tmp.name}) + config = AiosqliteConfig(connection_config={"database": tmp.name}) # Safe for parallel tests ``` diff --git a/AGENTS.md b/AGENTS.md index 25abe45e8..f86847da7 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -93,7 +93,7 @@ Supported adapters: adbc, aiosqlite, asyncmy, asyncpg, bigquery, duckdb, oracled manager = SQLSpec() # 2. Create and register configuration (returns same instance) -config = manager.add_config(AsyncpgConfig(pool_config={"dsn": "postgresql://..."})) +config = manager.add_config(AsyncpgConfig(connection_config={"dsn": "postgresql://..."})) # 3. 
Get session via context manager (using config instance) async with manager.provide_session(config) as session: @@ -283,8 +283,8 @@ from sqlspec.adapters.asyncpg import AsyncpgConfig manager = SQLSpec() # Config instance IS the handle - add_config returns same instance -main_db = manager.add_config(AsyncpgConfig(pool_config={"dsn": "postgresql://main/..."})) -analytics_db = manager.add_config(AsyncpgConfig(pool_config={"dsn": "postgresql://analytics/..."})) +main_db = manager.add_config(AsyncpgConfig(connection_config={"dsn": "postgresql://main/..."})) +analytics_db = manager.add_config(AsyncpgConfig(connection_config={"dsn": "postgresql://analytics/..."})) # Type checker knows: AsyncpgConfig → AsyncContextManager[AsyncpgDriver] async with manager.provide_session(main_db) as driver: @@ -330,7 +330,7 @@ All extensions use `extension_config` in database config: ```python config = AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, extension_config={ "starlette": {"commit_mode": "autocommit", "session_key": "db"} } @@ -470,6 +470,46 @@ Dialect.classes["custom"] = CustomDialect **Documentation**: See `/docs/guides/architecture/custom-sqlglot-dialects.md` for full guide +### Configuration Parameter Standardization Pattern + +For API consistency across all adapters (pooled and non-pooled): + +```python +# ALL configs accept these parameters for consistency: +class AdapterConfig(AsyncDatabaseConfig): # or SyncDatabaseConfig + def __init__( + self, + *, + connection_config: dict[str, Any] | None = None, # Settings dict + connection_instance: PoolT | None = None, # Pre-created pool/connection + ... + ) -> None: + super().__init__( + connection_config=connection_config, + connection_instance=connection_instance, + ... 
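+            # e.g. statement_config, extension_config, driver_features, bind_key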
+
 ### Error Handling

 - Custom exceptions inherit from `SQLSpecError` in `sqlspec/exceptions.py`
diff --git a/README.md b/README.md
index 3abe8dd2c..d32309584 100644
--- a/README.md
+++ b/README.md
@@ -64,7 +64,7 @@ class Greeting(BaseModel):
     message: str

 spec = SQLSpec()
-db = sql.add_config(SqliteConfig(pool_config={"database": ":memory:"}))
+db = spec.add_config(SqliteConfig(connection_config={"database": ":memory:"}))

 with spec.provide_session(db) as session:
     greeting = session.select_one(
diff --git a/docs/PYPI_README.md b/docs/PYPI_README.md
index 3abe8dd2c..d32309584 100644
--- a/docs/PYPI_README.md
+++ b/docs/PYPI_README.md
@@ -64,7 +64,7 @@ class Greeting(BaseModel):
     message: str

 spec = SQLSpec()
-db = sql.add_config(SqliteConfig(pool_config={"database": ":memory:"}))
+db = spec.add_config(SqliteConfig(connection_config={"database": ":memory:"}))

 with spec.provide_session(db) as session:
     greeting = session.select_one(
diff --git a/docs/changelog.rst b/docs/changelog.rst
index dda760685..891c5daf0 100644
--- a/docs/changelog.rst
+++ b/docs/changelog.rst
@@ -10,6 +10,59 @@ SQLSpec Changelog
 Recent Updates
 ==============

+v0.33.0 - Configuration Parameter Standardization (BREAKING CHANGE)
+--------------------------------------------------------------------
+
+**Breaking Change:** All adapter configuration parameter names have been standardized for consistency across the entire library.
+
+**What Changed:**
+
+All database adapter configurations now use consistent parameter names:
+
+- ``pool_config`` → ``connection_config`` (configuration dictionary)
+- ``pool_instance`` → ``connection_instance`` (pre-created pool/connection instance)
+
+This affects **all 11 database adapters**: AsyncPG, Psycopg, Asyncmy, Psqlpy, OracleDB, SQLite, AioSQLite, DuckDB, BigQuery, ADBC, and Spanner.
+
+**Migration:**
+
+A search and replace across your codebase covers most call sites. Match on word
+boundaries so attribute access (``config.pool_config[...]``) is renamed along with
+keyword arguments:
+
+.. code-block:: bash
+
+    # Replace pool_config with connection_config
+    find . -name "*.py" -exec sed -i 's/\bpool_config\b/connection_config/g' {} +
+
+    # Replace pool_instance with connection_instance
+    find . -name "*.py" -exec sed -i 's/\bpool_instance\b/connection_instance/g' {} +
+
+Afterwards, ``grep -rn pool_config`` and ``grep -rn pool_instance`` should come
+back empty; any remaining hits are call sites the rename missed.
+
+**Before:**
+
+.. code-block:: python
+
+    config = AsyncpgConfig(
+        pool_config={"dsn": "postgresql://localhost/db"},
+        pool_instance=my_pool
+    )
+
+**After:**
+
+.. 
code-block:: python + + config = AsyncpgConfig( + connection_config={"dsn": "postgresql://localhost/db"}, + connection_instance=my_pool + ) + +**Why This Change:** + +- Eliminates inconsistency between pooled and non-pooled adapters +- More intuitive naming (``connection_instance`` works semantically for both pools and single connections) +- Reduces cognitive load when switching between adapters +- Clearer API for new users + +**See:** :doc:`/guides/migration/connection-config` for detailed migration guide with before/after examples for all adapters. + Query Stack Documentation Suite -------------------------------- @@ -43,7 +96,7 @@ All database configs (both sync and async) now provide migration methods: from sqlspec.migrations.commands import AsyncMigrationCommands config = AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, migration_config={"script_location": "migrations"} ) @@ -57,7 +110,7 @@ All database configs (both sync and async) now provide migration methods: from sqlspec.adapters.asyncpg import AsyncpgConfig config = AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, migration_config={"script_location": "migrations"} ) diff --git a/docs/examples/adapters/asyncpg/connect_pool.py b/docs/examples/adapters/asyncpg/connect_pool.py index b393856f5..388496611 100644 --- a/docs/examples/adapters/asyncpg/connect_pool.py +++ b/docs/examples/adapters/asyncpg/connect_pool.py @@ -9,7 +9,7 @@ DSN = os.getenv("SQLSPEC_ASYNCPG_DSN", "postgresql://postgres:postgres@localhost:5432/postgres") -config = AsyncpgConfig(bind_key="docs_asyncpg", pool_config=AsyncpgPoolConfig(dsn=DSN, min_size=1, max_size=5)) +config = AsyncpgConfig(bind_key="docs_asyncpg", connection_config=AsyncpgPoolConfig(dsn=DSN, min_size=1, max_size=5)) async def main() -> None: diff --git a/docs/examples/adapters/oracledb/connect_async.py b/docs/examples/adapters/oracledb/connect_async.py index aed192a62..f870954fc 100644 --- a/docs/examples/adapters/oracledb/connect_async.py +++ b/docs/examples/adapters/oracledb/connect_async.py @@ -12,7 +12,7 @@ PASSWORD = os.getenv("SQLSPEC_ORACLE_PASSWORD", "oracle") DSN = os.getenv("SQLSPEC_ORACLE_DSN", "localhost/FREE") config = OracleAsyncConfig( - bind_key="docs_oracle_async", pool_config={"user": USER, "password": PASSWORD, "dsn": DSN, "min": 1, "max": 4} + bind_key="docs_oracle_async", connection_config={"user": USER, "password": PASSWORD, "dsn": DSN, "min": 1, "max": 4} ) diff --git a/docs/examples/adapters/psycopg/connect_sync.py b/docs/examples/adapters/psycopg/connect_sync.py index f4de9b809..4a108c118 100644 --- a/docs/examples/adapters/psycopg/connect_sync.py +++ b/docs/examples/adapters/psycopg/connect_sync.py @@ -9,7 +9,7 @@ DSN = os.getenv("SQLSPEC_PSYCOPG_DSN", "postgresql://postgres:postgres@localhost:5432/postgres") -config = PsycopgSyncConfig(bind_key="docs_psycopg", pool_config={"conninfo": DSN, "min_size": 1, "max_size": 4}) +config = PsycopgSyncConfig(bind_key="docs_psycopg", connection_config={"conninfo": DSN, "min_size": 1, "max_size": 4}) def main() -> None: diff --git a/docs/examples/arrow/arrow_basic_usage.py b/docs/examples/arrow/arrow_basic_usage.py index 3c5c6f704..f00170159 100644 --- a/docs/examples/arrow/arrow_basic_usage.py +++ b/docs/examples/arrow/arrow_basic_usage.py @@ -82,7 +82,7 @@ async def example_postgres_conversion() -> None: dsn = os.getenv("SQLSPEC_USAGE_PG_DSN", "postgresql://localhost/db") db_manager = SQLSpec() - asyncpg_db = 
db_manager.add_config(AsyncpgConfig(pool_config={"dsn": dsn})) + asyncpg_db = db_manager.add_config(AsyncpgConfig(connection_config={"dsn": dsn})) async with db_manager.provide_session(asyncpg_db) as session: # Create test table with PostgreSQL-specific types @@ -122,7 +122,7 @@ async def example_pandas_integration() -> None: from sqlspec.adapters.sqlite import SqliteConfig db_manager = SQLSpec() - sqlite_db = db_manager.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + sqlite_db = db_manager.add_config(SqliteConfig(connection_config={"database": ":memory:"})) with db_manager.provide_session(sqlite_db) as session: # Create and populate table @@ -168,7 +168,7 @@ async def example_polars_integration() -> None: from sqlspec.adapters.duckdb import DuckDBConfig db_manager = SQLSpec() - duckdb = db_manager.add_config(DuckDBConfig(pool_config={"database": ":memory:"})) + duckdb = db_manager.add_config(DuckDBConfig(connection_config={"database": ":memory:"})) with db_manager.provide_session(duckdb) as session: # Create and populate table @@ -211,7 +211,7 @@ async def example_return_formats() -> None: from sqlspec.adapters.duckdb import DuckDBConfig db_manager = SQLSpec() - duckdb = db_manager.add_config(DuckDBConfig(pool_config={"database": ":memory:"})) + duckdb = db_manager.add_config(DuckDBConfig(connection_config={"database": ":memory:"})) with db_manager.provide_session(duckdb) as session: # Create test data @@ -250,7 +250,7 @@ async def example_parquet_export() -> None: from sqlspec.adapters.duckdb import DuckDBConfig db_manager = SQLSpec() - duckdb = db_manager.add_config(DuckDBConfig(pool_config={"database": ":memory:"})) + duckdb = db_manager.add_config(DuckDBConfig(connection_config={"database": ":memory:"})) with db_manager.provide_session(duckdb) as session: # Create and populate table @@ -311,7 +311,7 @@ async def example_native_only_mode() -> None: print() # SQLite does not have native Arrow support - sqlite_db = db_manager.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + sqlite_db = db_manager.add_config(SqliteConfig(connection_config={"database": ":memory:"})) with db_manager.provide_session(sqlite_db) as session: session.execute("CREATE TABLE test (id INTEGER, name TEXT)") diff --git a/docs/examples/extensions/adk/basic_aiosqlite.py b/docs/examples/extensions/adk/basic_aiosqlite.py index 70604ee72..222d66c25 100644 --- a/docs/examples/extensions/adk/basic_aiosqlite.py +++ b/docs/examples/extensions/adk/basic_aiosqlite.py @@ -29,7 +29,7 @@ def _event(author: str, text: str) -> Event: async def main() -> None: """Create a session, append two events, and read the transcript.""" - config = AiosqliteConfig(pool_config={"database": ":memory:"}) + config = AiosqliteConfig(connection_config={"database": ":memory:"}) store = AiosqliteADKStore(config) await store.create_tables() service = SQLSpecSessionService(store) diff --git a/docs/examples/extensions/adk/litestar_aiosqlite.py b/docs/examples/extensions/adk/litestar_aiosqlite.py index 8f8c40872..eaad094bc 100644 --- a/docs/examples/extensions/adk/litestar_aiosqlite.py +++ b/docs/examples/extensions/adk/litestar_aiosqlite.py @@ -9,7 +9,7 @@ from sqlspec.adapters.aiosqlite.adk import AiosqliteADKStore from sqlspec.extensions.adk import SQLSpecSessionService -config = AiosqliteConfig(pool_config={"database": ":memory:"}) +config = AiosqliteConfig(connection_config={"database": ":memory:"}) service: "SQLSpecSessionService | None" = None diff --git a/docs/examples/loaders/sql_files.py 
b/docs/examples/loaders/sql_files.py index 7b5e6c9de..84ae9fe9c 100644 --- a/docs/examples/loaders/sql_files.py +++ b/docs/examples/loaders/sql_files.py @@ -16,7 +16,7 @@ def main() -> None: loader = SQLFileLoader() loader.load_sql(QUERIES) registry = SQLSpec() - config = registry.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + config = registry.add_config(SqliteConfig(connection_config={"database": ":memory:"})) with registry.provide_session(config) as session: session.execute( """ diff --git a/docs/examples/patterns/builder/select_and_insert.py b/docs/examples/patterns/builder/select_and_insert.py index e28d3ab2f..b859aed1f 100644 --- a/docs/examples/patterns/builder/select_and_insert.py +++ b/docs/examples/patterns/builder/select_and_insert.py @@ -10,7 +10,7 @@ def main() -> None: """Create a table, insert demo rows, and fetch results with the builder API.""" registry = SQLSpec() - config = registry.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + config = registry.add_config(SqliteConfig(connection_config={"database": ":memory:"})) with registry.provide_session(config) as session: session.execute( """ diff --git a/docs/examples/patterns/configs/multi_adapter_registry.py b/docs/examples/patterns/configs/multi_adapter_registry.py index 60a818230..a638227b6 100644 --- a/docs/examples/patterns/configs/multi_adapter_registry.py +++ b/docs/examples/patterns/configs/multi_adapter_registry.py @@ -15,10 +15,10 @@ def build_registry() -> "SQLSpec": """Create a registry with both sync and async adapters.""" dsn = os.getenv("SQLSPEC_USAGE_PG_DSN", "postgresql://localhost/db") registry = SQLSpec() - registry.add_config(SqliteConfig(bind_key="sync_sqlite", pool_config={"database": ":memory:"})) - registry.add_config(AiosqliteConfig(bind_key="async_sqlite", pool_config={"database": ":memory:"})) - registry.add_config(DuckDBConfig(bind_key="duckdb_docs", pool_config={"database": ":memory:docs_duck"})) - registry.add_config(AsyncpgConfig(bind_key="asyncpg_docs", pool_config=AsyncpgPoolConfig(dsn=dsn))) + registry.add_config(SqliteConfig(bind_key="sync_sqlite", connection_config={"database": ":memory:"})) + registry.add_config(AiosqliteConfig(bind_key="async_sqlite", connection_config={"database": ":memory:"})) + registry.add_config(DuckDBConfig(bind_key="duckdb_docs", connection_config={"database": ":memory:docs_duck"})) + registry.add_config(AsyncpgConfig(bind_key="asyncpg_docs", connection_config=AsyncpgPoolConfig(dsn=dsn))) return registry diff --git a/docs/examples/patterns/migrations/runner_basic.py b/docs/examples/patterns/migrations/runner_basic.py index 86ae1ec5a..2f646eb08 100644 --- a/docs/examples/patterns/migrations/runner_basic.py +++ b/docs/examples/patterns/migrations/runner_basic.py @@ -14,7 +14,7 @@ def _config() -> "SqliteConfig": """Return a SQLite config pointing at the bundled migration files.""" return SqliteConfig( - pool_config={"database": ":memory:"}, migration_config={"script_location": str(MIGRATIONS_PATH)} + connection_config={"database": ":memory:"}, migration_config={"script_location": str(MIGRATIONS_PATH)} ) diff --git a/docs/examples/patterns/multi_tenant/router.py b/docs/examples/patterns/multi_tenant/router.py index 0a403f62e..4a04f564c 100644 --- a/docs/examples/patterns/multi_tenant/router.py +++ b/docs/examples/patterns/multi_tenant/router.py @@ -13,7 +13,9 @@ class TenantRouter: """Maintain isolated SqliteConfig instances for each tenant slug.""" def __init__(self, tenants: "tuple[str, ...]") -> None: - self._configs = {slug: 
SqliteConfig(pool_config={"database": ":memory:"}, bind_key=slug) for slug in tenants} + self._configs = { + slug: SqliteConfig(connection_config={"database": ":memory:"}, bind_key=slug) for slug in tenants + } def insert_article(self, slug: str, title: str) -> None: config = self._configs[slug] diff --git a/docs/examples/patterns/stacks/query_stack_example.py b/docs/examples/patterns/stacks/query_stack_example.py index 5aabdc625..35f936f56 100644 --- a/docs/examples/patterns/stacks/query_stack_example.py +++ b/docs/examples/patterns/stacks/query_stack_example.py @@ -64,7 +64,7 @@ async def _seed_async_tables(session: "Any", user_id: int, roles: "tuple[str, .. def run_sync_example() -> None: """Execute the stack with the synchronous SQLite adapter.""" registry = SQLSpec() - config = registry.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + config = registry.add_config(SqliteConfig(connection_config={"database": ":memory:"})) with registry.provide_session(config) as session: _seed_sync_tables(session, 1, ("admin", "editor")) results = session.execute_stack(build_stack(user_id=1, action="sync-login")) @@ -81,7 +81,7 @@ def run_async_example() -> None: async def _inner() -> None: registry = SQLSpec() - config = registry.add_config(AiosqliteConfig(pool_config={"database": ":memory:"})) + config = registry.add_config(AiosqliteConfig(connection_config={"database": ":memory:"})) async with registry.provide_session(config) as session: await _seed_async_tables(session, 2, ("viewer",)) results = await session.execute_stack(build_stack(user_id=2, action="async-login")) diff --git a/docs/examples/quickstart/quickstart_1.py b/docs/examples/quickstart/quickstart_1.py index 1987e0aac..0a9516207 100644 --- a/docs/examples/quickstart/quickstart_1.py +++ b/docs/examples/quickstart/quickstart_1.py @@ -7,7 +7,7 @@ def test_quickstart_1() -> None: from sqlspec.adapters.sqlite import SqliteConfig db_manager = SQLSpec() - db = db_manager.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + db = db_manager.add_config(SqliteConfig(connection_config={"database": ":memory:"})) with db_manager.provide_session(db) as session: result = session.execute("SELECT 'Hello, SQLSpec!' 
as message") diff --git a/docs/examples/quickstart/quickstart_2.py b/docs/examples/quickstart/quickstart_2.py index 60fb2ffb1..ca2eedad2 100644 --- a/docs/examples/quickstart/quickstart_2.py +++ b/docs/examples/quickstart/quickstart_2.py @@ -7,7 +7,7 @@ def test_quickstart_2() -> None: from sqlspec.adapters.sqlite import SqliteConfig db_manager = SQLSpec() - db = db_manager.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + db = db_manager.add_config(SqliteConfig(connection_config={"database": ":memory:"})) with db_manager.provide_session(db) as session: _ = session.execute( diff --git a/docs/examples/quickstart/quickstart_3.py b/docs/examples/quickstart/quickstart_3.py index 5917342f7..cff8a60f9 100644 --- a/docs/examples/quickstart/quickstart_3.py +++ b/docs/examples/quickstart/quickstart_3.py @@ -14,7 +14,7 @@ class User(BaseModel): email: str db_manager = SQLSpec() - db = db_manager.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + db = db_manager.add_config(SqliteConfig(connection_config={"database": ":memory:"})) with db_manager.provide_session(db) as session: _ = session.execute( diff --git a/docs/examples/quickstart/quickstart_4.py b/docs/examples/quickstart/quickstart_4.py index 6fb21a402..185de1902 100644 --- a/docs/examples/quickstart/quickstart_4.py +++ b/docs/examples/quickstart/quickstart_4.py @@ -20,7 +20,7 @@ class User(BaseModel): email: str db_manager = SQLSpec() - db = db_manager.add_config(AiosqliteConfig(pool_config={"database": ":memory:"})) + db = db_manager.add_config(AiosqliteConfig(connection_config={"database": ":memory:"})) async with db_manager.provide_session(db) as session: await session.execute( diff --git a/docs/examples/quickstart/quickstart_5.py b/docs/examples/quickstart/quickstart_5.py index ca32fd3e5..daa379f60 100644 --- a/docs/examples/quickstart/quickstart_5.py +++ b/docs/examples/quickstart/quickstart_5.py @@ -24,7 +24,7 @@ class User(BaseModel): name: str email: str - def pool_config() -> "dict[str, Any]": + def get_connection_config() -> "dict[str, Any]": return { "host": os.getenv("SQLSPEC_QUICKSTART_PG_HOST", "localhost"), "port": int(os.getenv("SQLSPEC_QUICKSTART_PG_PORT", "5432")), @@ -49,7 +49,7 @@ async def seed_users(session: Any) -> None: ) db_manager = SQLSpec() - db = db_manager.add_config(AsyncpgConfig(pool_config=pool_config())) + db = db_manager.add_config(AsyncpgConfig(connection_config=get_connection_config())) async with db_manager.provide_session(db) as session: await seed_users(session) diff --git a/docs/examples/quickstart/quickstart_6.py b/docs/examples/quickstart/quickstart_6.py index 1e60cb2ca..5443fe295 100644 --- a/docs/examples/quickstart/quickstart_6.py +++ b/docs/examples/quickstart/quickstart_6.py @@ -18,8 +18,8 @@ def test_quickstart_6(tmp_path: Path) -> None: analytics_db = tmp_path / "analytics.duckdb" db_manager = SQLSpec() - sqlite_db = db_manager.add_config(SqliteConfig(pool_config={"database": app_db.name})) - duckdb_db = db_manager.add_config(DuckDBConfig(pool_config={"database": analytics_db.name})) + sqlite_db = db_manager.add_config(SqliteConfig(connection_config={"database": app_db.name})) + duckdb_db = db_manager.add_config(DuckDBConfig(connection_config={"database": analytics_db.name})) with db_manager.provide_session(sqlite_db) as sqlite_session: users = sqlite_session.select("SELECT 1") diff --git a/docs/examples/quickstart/quickstart_7.py b/docs/examples/quickstart/quickstart_7.py index 7544a33a8..d379e566b 100644 --- a/docs/examples/quickstart/quickstart_7.py +++ 
b/docs/examples/quickstart/quickstart_7.py @@ -7,7 +7,7 @@ def test_quickstart_7() -> None: from sqlspec.adapters.sqlite import SqliteConfig db_manager = SQLSpec() - db = db_manager.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + db = db_manager.add_config(SqliteConfig(connection_config={"database": ":memory:"})) with db_manager.provide_session(db) as session: session.begin() diff --git a/docs/examples/quickstart/quickstart_8.py b/docs/examples/quickstart/quickstart_8.py index e5e80b75e..5f14a5178 100644 --- a/docs/examples/quickstart/quickstart_8.py +++ b/docs/examples/quickstart/quickstart_8.py @@ -9,7 +9,7 @@ def test_quickstart_8() -> None: query = sql.select("id", "name", "email").from_("qs8_users").where("age > ?").order_by("name") db_manager = SQLSpec() - db = db_manager.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + db = db_manager.add_config(SqliteConfig(connection_config={"database": ":memory:"})) with db_manager.provide_session(db) as session: session.execute( diff --git a/docs/examples/shared/configs.py b/docs/examples/shared/configs.py index 95705a3d0..7636747bd 100644 --- a/docs/examples/shared/configs.py +++ b/docs/examples/shared/configs.py @@ -11,7 +11,7 @@ def sqlite_registry(bind_key: str = "docs_sqlite") -> "tuple[SQLSpec, SqliteConfig]": """Return a registry with a single SQLite configuration.""" registry = SQLSpec() - config = registry.add_config(SqliteConfig(bind_key=bind_key, pool_config={"database": ":memory:"})) + config = registry.add_config(SqliteConfig(bind_key=bind_key, connection_config={"database": ":memory:"})) return registry, config @@ -21,7 +21,7 @@ def aiosqlite_registry(bind_key: str = "docs_aiosqlite") -> "tuple[SQLSpec, Aios config = registry.add_config( AiosqliteConfig( bind_key=bind_key, - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, extension_config={"litestar": {"commit_mode": "autocommit"}}, ) ) @@ -34,7 +34,7 @@ def duckdb_registry(bind_key: str = "docs_duckdb") -> "tuple[SQLSpec, DuckDBConf config = registry.add_config( DuckDBConfig( bind_key=bind_key, - pool_config={"database": ":memory:shared_docs"}, + connection_config={"database": ":memory:shared_docs"}, extension_config={"litestar": {"commit_mode": "autocommit"}}, ) ) diff --git a/docs/examples/usage/usage_cli_1.py b/docs/examples/usage/usage_cli_1.py index d8f6a9c83..deba406d2 100644 --- a/docs/examples/usage/usage_cli_1.py +++ b/docs/examples/usage/usage_cli_1.py @@ -7,7 +7,7 @@ def test_single_and_multiple_configs() -> None: # start-example # Single config db_config = AsyncpgConfig( - pool_config={"dsn": "postgresql://user:pass@localhost/mydb"}, + connection_config={"dsn": "postgresql://user:pass@localhost/mydb"}, migration_config={"script_location": "migrations", "enabled": True}, ) @@ -15,7 +15,7 @@ def test_single_and_multiple_configs() -> None: configs = [ AsyncpgConfig( bind_key="postgres", - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, migration_config={"script_location": "migrations/postgres"}, ) # ... 
more configs diff --git a/docs/examples/usage/usage_cli_2.py b/docs/examples/usage/usage_cli_2.py index 22587d20c..def00fb53 100644 --- a/docs/examples/usage/usage_cli_2.py +++ b/docs/examples/usage/usage_cli_2.py @@ -9,17 +9,17 @@ def test_multi_config() -> None: configs = [ AsyncpgConfig( bind_key="postgres", - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, migration_config={"script_location": "migrations/postgres", "enabled": True}, ), AsyncmyConfig( bind_key="mysql", - pool_config={"host": "localhost", "database": "mydb"}, + connection_config={"host": "localhost", "database": "mydb"}, migration_config={"script_location": "migrations/mysql", "enabled": True}, ), AsyncpgConfig( bind_key="analytics", - pool_config={"dsn": "postgresql://analytics/..."}, + connection_config={"dsn": "postgresql://analytics/..."}, migration_config={"script_location": "migrations/analytics", "enabled": True}, ), ] diff --git a/docs/examples/usage/usage_configuration_1.py b/docs/examples/usage/usage_configuration_1.py index c7526d7db..647aca658 100644 --- a/docs/examples/usage/usage_configuration_1.py +++ b/docs/examples/usage/usage_configuration_1.py @@ -12,7 +12,7 @@ def test_sqlite_memory_db() -> None: db_manager = SQLSpec() # Add database configuration - db = db_manager.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + db = db_manager.add_config(SqliteConfig(connection_config={"database": ":memory:"})) # Use the database with db_manager.provide_session(db) as session: diff --git a/docs/examples/usage/usage_configuration_10.py b/docs/examples/usage/usage_configuration_10.py index d94be348c..e211384a8 100644 --- a/docs/examples/usage/usage_configuration_10.py +++ b/docs/examples/usage/usage_configuration_10.py @@ -17,7 +17,7 @@ async def test_manual_pool() -> None: pool = await asyncpg.create_pool( dsn=os.getenv("SQLSPEC_USAGE_PG_DSN", "postgresql://localhost/db"), min_size=10, max_size=20 ) - db = AsyncpgConfig(pool_instance=pool) + db = AsyncpgConfig(connection_instance=pool) # end-example - assert db.pool_instance is pool + assert db.connection_instance is pool await pool.close() diff --git a/docs/examples/usage/usage_configuration_11.py b/docs/examples/usage/usage_configuration_11.py index 6d6415eef..2c2c4f160 100644 --- a/docs/examples/usage/usage_configuration_11.py +++ b/docs/examples/usage/usage_configuration_11.py @@ -6,6 +6,6 @@ def test_thread_local_connections() -> None: # start-example from sqlspec.adapters.sqlite import SqliteConfig - config = SqliteConfig(pool_config={"database": "test.db"}) + config = SqliteConfig(connection_config={"database": "test.db"}) # end-example - assert config.pool_config["database"] == "test.db" + assert config.connection_config["database"] == "test.db" diff --git a/docs/examples/usage/usage_configuration_12.py b/docs/examples/usage/usage_configuration_12.py index 7b86b3aee..868428c85 100644 --- a/docs/examples/usage/usage_configuration_12.py +++ b/docs/examples/usage/usage_configuration_12.py @@ -23,7 +23,7 @@ def test_basic_statement_config() -> None: # Apply to adapter dsn = os.getenv("SQLSPEC_USAGE_PG_DSN", "postgresql://localhost/db") - config = AsyncpgConfig(pool_config={"dsn": dsn}, statement_config=statement_config) + config = AsyncpgConfig(connection_config={"dsn": dsn}, statement_config=statement_config) # end-example assert config.statement_config.dialect == "postgres" assert config.statement_config.enable_parsing is True diff --git a/docs/examples/usage/usage_configuration_16.py 
b/docs/examples/usage/usage_configuration_16.py index 3f257a915..03cb58127 100644 --- a/docs/examples/usage/usage_configuration_16.py +++ b/docs/examples/usage/usage_configuration_16.py @@ -17,7 +17,7 @@ def test_per_instance_cache_config() -> None: db_manager.update_cache_config(CacheConfig(sql_cache_enabled=True, sql_cache_size=500)) # Add database config - db = db_manager.add_config(SqliteConfig(pool_config={"database": tmp.name})) + db = db_manager.add_config(SqliteConfig(connection_config={"database": tmp.name})) # Use the configured spec with db_manager.provide_session(db) as session: diff --git a/docs/examples/usage/usage_configuration_17.py b/docs/examples/usage/usage_configuration_17.py index 21b6e1782..b49e007b1 100644 --- a/docs/examples/usage/usage_configuration_17.py +++ b/docs/examples/usage/usage_configuration_17.py @@ -14,7 +14,7 @@ def test_cache_statistics() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: db_manager = SQLSpec() - db = db_manager.add_config(SqliteConfig(pool_config={"database": tmp.name})) + db = db_manager.add_config(SqliteConfig(connection_config={"database": tmp.name})) # Execute some queries to generate cache activity with db_manager.provide_session(db) as session: diff --git a/docs/examples/usage/usage_configuration_19.py b/docs/examples/usage/usage_configuration_19.py index ec013ccb4..666dcab43 100644 --- a/docs/examples/usage/usage_configuration_19.py +++ b/docs/examples/usage/usage_configuration_19.py @@ -21,14 +21,14 @@ def test_binding_multiple_configs() -> None: db_manager = SQLSpec() # Add multiple configurations - add_config returns the config instance - sqlite_config = db_manager.add_config(SqliteConfig(pool_config={"database": tmp.name})) + sqlite_config = db_manager.add_config(SqliteConfig(connection_config={"database": tmp.name})) dsn = os.getenv("SQLSPEC_USAGE_PG_DSN", "postgresql://localhost/db") - pg_config = db_manager.add_config(AsyncpgConfig(pool_config={"dsn": dsn})) + pg_config = db_manager.add_config(AsyncpgConfig(connection_config={"dsn": dsn})) # Use specific configuration - pass the config instance directly with db_manager.provide_session(sqlite_config) as session: session.execute("SELECT 1") # end-example - assert sqlite_config.pool_config["database"] == tmp.name - assert pg_config.pool_config["dsn"] == os.getenv("SQLSPEC_USAGE_PG_DSN", "postgresql://localhost/db") + assert sqlite_config.connection_config["database"] == tmp.name + assert pg_config.connection_config["dsn"] == os.getenv("SQLSPEC_USAGE_PG_DSN", "postgresql://localhost/db") diff --git a/docs/examples/usage/usage_configuration_2.py b/docs/examples/usage/usage_configuration_2.py index b2632e300..1a749e887 100644 --- a/docs/examples/usage/usage_configuration_2.py +++ b/docs/examples/usage/usage_configuration_2.py @@ -11,7 +11,7 @@ def test_sqlite_config_setup(tmp_path: Path) -> None: database_file = tmp_path / "myapp.db" config = SqliteConfig( - pool_config={ + connection_config={ "database": database_file.name, # Database file path "timeout": 5.0, # Lock timeout in seconds "check_same_thread": False, # Allow multi-thread access @@ -20,4 +20,4 @@ def test_sqlite_config_setup(tmp_path: Path) -> None: } ) # end-example - assert config.pool_config["database"] == "myapp.db" + assert config.connection_config["database"] == "myapp.db" diff --git a/docs/examples/usage/usage_configuration_20.py b/docs/examples/usage/usage_configuration_20.py index 5d5ab5d68..c38b78c8e 100644 --- a/docs/examples/usage/usage_configuration_20.py +++ 
b/docs/examples/usage/usage_configuration_20.py @@ -21,9 +21,11 @@ def test_named_bindings() -> None: db_manager = SQLSpec() # Add with bind keys - add_config returns the config instance - cache_config = db_manager.add_config(SqliteConfig(pool_config={"database": tmp.name}, bind_key="cache_db")) + cache_config = db_manager.add_config( + SqliteConfig(connection_config={"database": tmp.name}, bind_key="cache_db") + ) dsn = os.getenv("SQLSPEC_USAGE_PG_DSN", "postgresql://localhost/db") - main_config = db_manager.add_config(AsyncpgConfig(pool_config={"dsn": dsn}, bind_key="main_db")) + main_config = db_manager.add_config(AsyncpgConfig(connection_config={"dsn": dsn}, bind_key="main_db")) # Access by config instance directly with db_manager.provide_session(cache_config) as session: diff --git a/docs/examples/usage/usage_configuration_21.py b/docs/examples/usage/usage_configuration_21.py index f0b7c2f5c..1865feb55 100644 --- a/docs/examples/usage/usage_configuration_21.py +++ b/docs/examples/usage/usage_configuration_21.py @@ -16,7 +16,7 @@ def test_basic_migration_config() -> None: dsn = os.getenv("SQLSPEC_USAGE_PG_DSN", "postgresql://localhost/db") config = AsyncpgConfig( - pool_config={"dsn": dsn}, + connection_config={"dsn": dsn}, extension_config={ "litestar": {"session_table": "custom_sessions"} # Extension settings }, diff --git a/docs/examples/usage/usage_configuration_22.py b/docs/examples/usage/usage_configuration_22.py index 6c974ba22..c82f310a9 100644 --- a/docs/examples/usage/usage_configuration_22.py +++ b/docs/examples/usage/usage_configuration_22.py @@ -14,7 +14,7 @@ def test_basic_migration_config() -> None: dsn = os.getenv("SQLSPEC_USAGE_PG_DSN", "postgresql://localhost/db") config = AsyncpgConfig( - pool_config={"dsn": dsn}, + connection_config={"dsn": dsn}, extension_config={ "litestar": {"session_table": "custom_sessions"} # Extension settings }, diff --git a/docs/examples/usage/usage_configuration_23.py b/docs/examples/usage/usage_configuration_23.py index ce281f2fc..e65fbf678 100644 --- a/docs/examples/usage/usage_configuration_23.py +++ b/docs/examples/usage/usage_configuration_23.py @@ -15,7 +15,7 @@ def test_extension_config() -> None: dsn = os.getenv("SQLSPEC_USAGE_PG_DSN", "postgresql://localhost/db") config = AsyncpgConfig( - pool_config={"dsn": dsn}, + connection_config={"dsn": dsn}, extension_config={ "litestar": { "connection_key": "db_connection", diff --git a/docs/examples/usage/usage_configuration_24.py b/docs/examples/usage/usage_configuration_24.py index bfcbd7b04..4e3ee0e87 100644 --- a/docs/examples/usage/usage_configuration_24.py +++ b/docs/examples/usage/usage_configuration_24.py @@ -23,7 +23,7 @@ def test_environment_based_configuration() -> None: from sqlspec.adapters.asyncpg import AsyncpgConfig config = AsyncpgConfig( - pool_config={ + connection_config={ "host": os.getenv("DB_HOST", "localhost"), "port": int(os.getenv("DB_PORT", "5432")), "user": os.getenv("DB_USER"), @@ -33,8 +33,8 @@ def test_environment_based_configuration() -> None: ) # end-example - assert config.pool_config["host"] == "testhost" - assert config.pool_config["port"] == 5433 - assert config.pool_config["user"] == "testuser" - assert config.pool_config["password"] == "testpass" - assert config.pool_config["database"] == "testdb" + assert config.connection_config["host"] == "testhost" + assert config.connection_config["port"] == 5433 + assert config.connection_config["user"] == "testuser" + assert config.connection_config["password"] == "testpass" + assert 
config.connection_config["database"] == "testdb" diff --git a/docs/examples/usage/usage_configuration_25.py b/docs/examples/usage/usage_configuration_25.py index b76f22ee8..2a4d61e42 100644 --- a/docs/examples/usage/usage_configuration_25.py +++ b/docs/examples/usage/usage_configuration_25.py @@ -15,9 +15,9 @@ def test_connection_pooling_best_practice() -> None: from sqlspec.adapters.asyncpg import AsyncpgConfig dsn = os.getenv("SQLSPEC_USAGE_PG_DSN", "postgresql://localhost/db") - config = AsyncpgConfig(pool_config={"dsn": dsn, "min_size": 10, "max_size": 20}) + config = AsyncpgConfig(connection_config={"dsn": dsn, "min_size": 10, "max_size": 20}) # end-example - assert config.pool_config["min_size"] == 10 - assert config.pool_config["max_size"] == 20 + assert config.connection_config["min_size"] == 10 + assert config.connection_config["max_size"] == 20 assert config.supports_connection_pooling is True diff --git a/docs/examples/usage/usage_configuration_27.py b/docs/examples/usage/usage_configuration_27.py index 2101d90c4..7949775e9 100644 --- a/docs/examples/usage/usage_configuration_27.py +++ b/docs/examples/usage/usage_configuration_27.py @@ -14,12 +14,12 @@ def test_tune_pool_sizes_best_practice() -> None: # start-example # CPU-bound workload - smaller pool - cpu_bound_pool_config = {"min_size": 5, "max_size": 10} + cpu_bound_connection_config = {"min_size": 5, "max_size": 10} # end-example - assert cpu_bound_pool_config["min_size"] == 5 - assert cpu_bound_pool_config["max_size"] == 10 + assert cpu_bound_connection_config["min_size"] == 5 + assert cpu_bound_connection_config["max_size"] == 10 # I/O-bound workload - larger pool - io_bound_pool_config = {"min_size": 20, "max_size": 50} - assert io_bound_pool_config["min_size"] == 20 - assert io_bound_pool_config["max_size"] == 50 + io_bound_connection_config = {"min_size": 20, "max_size": 50} + assert io_bound_connection_config["min_size"] == 20 + assert io_bound_connection_config["max_size"] == 50 diff --git a/docs/examples/usage/usage_configuration_29.py b/docs/examples/usage/usage_configuration_29.py index 002a0e457..b689bfc70 100644 --- a/docs/examples/usage/usage_configuration_29.py +++ b/docs/examples/usage/usage_configuration_29.py @@ -17,7 +17,7 @@ async def test_cleanup_resources_best_practice() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: db_manager = SQLSpec() - db = db_manager.add_config(AiosqliteConfig(pool_config={"database": tmp.name})) + db = db_manager.add_config(AiosqliteConfig(connection_config={"database": tmp.name})) # Use the connection async with db_manager.provide_session(db) as session: @@ -28,4 +28,4 @@ async def test_cleanup_resources_best_practice() -> None: # Verify pools are closed # end-example - assert db.pool_instance is None or not hasattr(db.pool_instance, "_pool") + assert db.connection_instance is None or not hasattr(db.connection_instance, "_pool") diff --git a/docs/examples/usage/usage_configuration_3.py b/docs/examples/usage/usage_configuration_3.py index 77f9f1657..94f469ca2 100644 --- a/docs/examples/usage/usage_configuration_3.py +++ b/docs/examples/usage/usage_configuration_3.py @@ -7,10 +7,10 @@ def test_memory_databases() -> None: from sqlspec.adapters.sqlite import SqliteConfig # In-memory database (isolated per connection) - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) # end-example - assert ":memory_" in config.pool_config["database"] + assert ":memory_" in 
config.connection_config["database"] # Shared memory database - shared_config = SqliteConfig(pool_config={"database": "file:memdb1?mode=memory&cache=shared", "uri": True}) - assert shared_config.pool_config["database"] == "file:memdb1?mode=memory&cache=shared" + shared_config = SqliteConfig(connection_config={"database": "file:memdb1?mode=memory&cache=shared", "uri": True}) + assert shared_config.connection_config["database"] == "file:memdb1?mode=memory&cache=shared" diff --git a/docs/examples/usage/usage_configuration_30.py b/docs/examples/usage/usage_configuration_30.py index 47e74a359..6d91361fe 100644 --- a/docs/examples/usage/usage_configuration_30.py +++ b/docs/examples/usage/usage_configuration_30.py @@ -10,7 +10,7 @@ def test_telemetry_snapshot() -> None: from sqlspec.adapters.sqlite import SqliteConfig db_manager = SQLSpec() - db = db_manager.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + db = db_manager.add_config(SqliteConfig(connection_config={"database": ":memory:"})) with db_manager.provide_session(db) as session: session.execute("SELECT 1") diff --git a/docs/examples/usage/usage_configuration_4.py b/docs/examples/usage/usage_configuration_4.py index e8897f951..ab73baf82 100644 --- a/docs/examples/usage/usage_configuration_4.py +++ b/docs/examples/usage/usage_configuration_4.py @@ -20,7 +20,7 @@ def test_asyncpg_config_setup() -> None: dsn = os.getenv("SQLSPEC_USAGE_PG_DSN", f"postgresql://{user}:{password}@{host}:{port}/{database}") config = AsyncpgConfig( - pool_config={ + connection_config={ "dsn": dsn, "min_size": 10, "max_size": 20, @@ -32,4 +32,4 @@ def test_asyncpg_config_setup() -> None: } ) # end-example - assert config.pool_config["host"] == host + assert config.connection_config["host"] == host diff --git a/docs/examples/usage/usage_configuration_5.py b/docs/examples/usage/usage_configuration_5.py index b8b6de2c9..176d16905 100644 --- a/docs/examples/usage/usage_configuration_5.py +++ b/docs/examples/usage/usage_configuration_5.py @@ -21,7 +21,7 @@ def test_psycopg_config_setup() -> None: # Async version config = PsycopgAsyncConfig( - pool_config={ + connection_config={ "conninfo": dsn, # Or keyword arguments: "host": host, @@ -36,4 +36,4 @@ def test_psycopg_config_setup() -> None: } ) # end-example - assert config.pool_config is not None + assert config.connection_config is not None diff --git a/docs/examples/usage/usage_configuration_6.py b/docs/examples/usage/usage_configuration_6.py index 4fd656240..3b41cb700 100644 --- a/docs/examples/usage/usage_configuration_6.py +++ b/docs/examples/usage/usage_configuration_6.py @@ -6,7 +6,7 @@ def test_asyncmy_config_setup() -> None: from sqlspec.adapters.asyncmy import AsyncmyConfig config = AsyncmyConfig( - pool_config={ + connection_config={ "host": "localhost", "port": 3306, "user": "myuser", @@ -19,4 +19,4 @@ def test_asyncmy_config_setup() -> None: } ) # end-example - assert config.pool_config["port"] == 3306 + assert config.connection_config["port"] == 3306 diff --git a/docs/examples/usage/usage_configuration_7.py b/docs/examples/usage/usage_configuration_7.py index cb85b76e7..7023ccb3c 100644 --- a/docs/examples/usage/usage_configuration_7.py +++ b/docs/examples/usage/usage_configuration_7.py @@ -11,8 +11,8 @@ def test_duckdb_config_setup(tmp_path: Path) -> None: in_memory_config = DuckDBConfig() # end-example - assert in_memory_config.pool_config.get("database") == ":memory:shared_db" + assert in_memory_config.connection_config.get("database") == ":memory:shared_db" database_file = tmp_path / 
"analytics.duckdb" - persistent_config = DuckDBConfig(pool_config={"database": database_file.name, "read_only": False}) - assert persistent_config.pool_config["read_only"] is False + persistent_config = DuckDBConfig(connection_config={"database": database_file.name, "read_only": False}) + assert persistent_config.connection_config["read_only"] is False diff --git a/docs/examples/usage/usage_configuration_8.py b/docs/examples/usage/usage_configuration_8.py index cf9c0deb5..50d9f458a 100644 --- a/docs/examples/usage/usage_configuration_8.py +++ b/docs/examples/usage/usage_configuration_8.py @@ -14,7 +14,7 @@ def test_asyncpg_pool_setup() -> None: dsn = os.getenv("SQLSPEC_USAGE_PG_DSN", "postgresql://localhost/db") config = AsyncpgConfig( - pool_config={ + connection_config={ "dsn": dsn, "min_size": 10, "max_size": 20, @@ -23,4 +23,4 @@ def test_asyncpg_pool_setup() -> None: } ) # end-example - assert config.pool_config["min_size"] == 10 + assert config.connection_config["min_size"] == 10 diff --git a/docs/examples/usage/usage_configuration_9.py b/docs/examples/usage/usage_configuration_9.py index 677d5931c..2d158a980 100644 --- a/docs/examples/usage/usage_configuration_9.py +++ b/docs/examples/usage/usage_configuration_9.py @@ -15,8 +15,8 @@ def test_pool_lifecycle() -> None: db_manager = SQLSpec() dsn = os.getenv("SQLSPEC_USAGE_PG_DSN", "postgresql://localhost/db") - asyncpg_config = db_manager.add_config(AsyncpgConfig(pool_config={"dsn": dsn})) + asyncpg_config = db_manager.add_config(AsyncpgConfig(connection_config={"dsn": dsn})) # The config instance is now the handle - add_config returns it directly # end-example - assert asyncpg_config.pool_config["dsn"] == dsn + assert asyncpg_config.connection_config["dsn"] == dsn diff --git a/docs/examples/usage/usage_data_flow_11.py b/docs/examples/usage/usage_data_flow_11.py index a3154f6d6..d304e8d4a 100644 --- a/docs/examples/usage/usage_data_flow_11.py +++ b/docs/examples/usage/usage_data_flow_11.py @@ -11,7 +11,7 @@ def test_sql_result_object() -> None: from sqlspec.adapters.sqlite import SqliteConfig db_manager = SQLSpec() - db = db_manager.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + db = db_manager.add_config(SqliteConfig(connection_config={"database": ":memory:"})) # start-example with db_manager.provide_session(db) as session: diff --git a/docs/examples/usage/usage_data_flow_12.py b/docs/examples/usage/usage_data_flow_12.py index a4be326fc..07153fa97 100644 --- a/docs/examples/usage/usage_data_flow_12.py +++ b/docs/examples/usage/usage_data_flow_12.py @@ -9,7 +9,7 @@ def test_convenience_methods() -> None: from sqlspec.adapters.sqlite import SqliteConfig db_manager = SQLSpec() - db = db_manager.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + db = db_manager.add_config(SqliteConfig(connection_config={"database": ":memory:"})) with db_manager.provide_session(db) as session: # Create a test table diff --git a/docs/examples/usage/usage_data_flow_13.py b/docs/examples/usage/usage_data_flow_13.py index 0b3d97954..6f16524b8 100644 --- a/docs/examples/usage/usage_data_flow_13.py +++ b/docs/examples/usage/usage_data_flow_13.py @@ -19,7 +19,7 @@ class User(BaseModel): is_active: bool | None = True db_manager = SQLSpec() - db = db_manager.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + db = db_manager.add_config(SqliteConfig(connection_config={"database": ":memory:"})) with db_manager.provide_session(db) as session: # Create test table diff --git a/docs/examples/usage/usage_data_flow_8.py 
b/docs/examples/usage/usage_data_flow_8.py index f2cec2fb1..382919730 100644 --- a/docs/examples/usage/usage_data_flow_8.py +++ b/docs/examples/usage/usage_data_flow_8.py @@ -9,7 +9,7 @@ def test_statement_execution() -> None: from sqlspec.adapters.sqlite import SqliteConfig db_manager = SQLSpec() - db = db_manager.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + db = db_manager.add_config(SqliteConfig(connection_config={"database": ":memory:"})) # start-example sql_statement = SQL("SELECT ? AS message", "pipeline-complete") diff --git a/docs/examples/usage/usage_data_flow_9.py b/docs/examples/usage/usage_data_flow_9.py index 3d1af1488..2e23ec537 100644 --- a/docs/examples/usage/usage_data_flow_9.py +++ b/docs/examples/usage/usage_data_flow_9.py @@ -13,7 +13,7 @@ def test_driver_execution() -> None: # start-example # Driver receives compiled SQL and parameters db_manager = SQLSpec() - db = db_manager.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + db = db_manager.add_config(SqliteConfig(connection_config={"database": ":memory:"})) with db_manager.provide_session(db) as session: message = session.select_value("SELECT 'test' as message") print(message) diff --git a/docs/examples/usage/usage_drivers_and_querying_1.py b/docs/examples/usage/usage_drivers_and_querying_1.py index 37338d2a2..43b788e40 100644 --- a/docs/examples/usage/usage_drivers_and_querying_1.py +++ b/docs/examples/usage/usage_drivers_and_querying_1.py @@ -26,7 +26,7 @@ async def test_importable_1(postgres_service: PostgresService) -> None: db = spec.add_config( AsyncpgConfig( - pool_config=AsyncpgPoolConfig(host=host, port=port, user=user, password=password, database=database) + connection_config=AsyncpgPoolConfig(host=host, port=port, user=user, password=password, database=database) ) ) # Config layer, registers pool async with spec.provide_session(db) as session: # Session layer diff --git a/docs/examples/usage/usage_drivers_and_querying_10.py b/docs/examples/usage/usage_drivers_and_querying_10.py index fee35e252..836e5b171 100644 --- a/docs/examples/usage/usage_drivers_and_querying_10.py +++ b/docs/examples/usage/usage_drivers_and_querying_10.py @@ -24,7 +24,7 @@ def test_example_10_duckdb_config(tmp_path: Path) -> None: spec = SQLSpec() # In-memory in_memory_db = spec.add_config(DuckDBConfig()) - persistent_db = spec.add_config(DuckDBConfig(pool_config={"database": str(db_path)})) + persistent_db = spec.add_config(DuckDBConfig(connection_config={"database": str(db_path)})) try: # Test with in-memory config diff --git a/docs/examples/usage/usage_drivers_and_querying_11.py b/docs/examples/usage/usage_drivers_and_querying_11.py index 36a19bc50..3701d0c4f 100644 --- a/docs/examples/usage/usage_drivers_and_querying_11.py +++ b/docs/examples/usage/usage_drivers_and_querying_11.py @@ -16,7 +16,7 @@ def test_example_11_oracledb_config(oracle_service: OracleService) -> None: spec = SQLSpec() config = OracleSyncConfig( - pool_config={ + connection_config={ "user": oracle_service.user, "password": oracle_service.password, "host": oracle_service.host, diff --git a/docs/examples/usage/usage_drivers_and_querying_13.py b/docs/examples/usage/usage_drivers_and_querying_13.py index 6a01eac52..f15ce684a 100644 --- a/docs/examples/usage/usage_drivers_and_querying_13.py +++ b/docs/examples/usage/usage_drivers_and_querying_13.py @@ -10,7 +10,7 @@ def test_example_13_placeholder() -> None: from sqlspec.adapters.sqlite import SqliteConfig spec = SQLSpec() - config = SqliteConfig(pool_config={"database": 
":memory:", "timeout": 5.0, "check_same_thread": False}) + config = SqliteConfig(connection_config={"database": ":memory:", "timeout": 5.0, "check_same_thread": False}) with spec.provide_session(config) as session: create_table_query = ( """create table if not exists users (id default int primary key, name varchar(128), email text)""" diff --git a/docs/examples/usage/usage_drivers_and_querying_14.py b/docs/examples/usage/usage_drivers_and_querying_14.py index c7d5dd514..335064968 100644 --- a/docs/examples/usage/usage_drivers_and_querying_14.py +++ b/docs/examples/usage/usage_drivers_and_querying_14.py @@ -11,7 +11,7 @@ def test_example_14_placeholder() -> None: from sqlspec.adapters.sqlite import SqliteConfig spec = SQLSpec() - config = SqliteConfig(pool_config={"database": ":memory:", "timeout": 5.0, "check_same_thread": False}) + config = SqliteConfig(connection_config={"database": ":memory:", "timeout": 5.0, "check_same_thread": False}) with spec.provide_session(config) as session: create_table_query = """create table if not exists users (id default int primary key, name varchar(128), email text, status varchar(32))""" diff --git a/docs/examples/usage/usage_drivers_and_querying_15.py b/docs/examples/usage/usage_drivers_and_querying_15.py index a8ea91800..3fbedd377 100644 --- a/docs/examples/usage/usage_drivers_and_querying_15.py +++ b/docs/examples/usage/usage_drivers_and_querying_15.py @@ -11,7 +11,7 @@ def test_example_15_placeholder() -> None: spec = SQLSpec() - config = SqliteConfig(pool_config={"database": ":memory:"}) # Thread local pooling + config = SqliteConfig(connection_config={"database": ":memory:"}) # Thread local pooling with spec.provide_session(config) as session: create_users_table_query = """create table if not exists users (id default int primary key, name varchar(128), email text, status varchar(32)); """ diff --git a/docs/examples/usage/usage_drivers_and_querying_16.py b/docs/examples/usage/usage_drivers_and_querying_16.py index a06fab5d3..14d007c59 100644 --- a/docs/examples/usage/usage_drivers_and_querying_16.py +++ b/docs/examples/usage/usage_drivers_and_querying_16.py @@ -18,7 +18,7 @@ @pytest.mark.anyio async def test_example_16_async_transactions() -> None: spec = SQLSpec() - config = spec.add_config(AiosqliteConfig(pool_config={"database": ":memory:"})) + config = spec.add_config(AiosqliteConfig(connection_config={"database": ":memory:"})) # start-example @asynccontextmanager diff --git a/docs/examples/usage/usage_drivers_and_querying_17.py b/docs/examples/usage/usage_drivers_and_querying_17.py index 583d78b43..0e8844ae7 100644 --- a/docs/examples/usage/usage_drivers_and_querying_17.py +++ b/docs/examples/usage/usage_drivers_and_querying_17.py @@ -8,7 +8,7 @@ def test_example_17_positional_parameters() -> None: spec = SQLSpec() - db = spec.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + db = spec.add_config(SqliteConfig(connection_config={"database": ":memory:"})) with spec.provide_session(db) as session: session.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, status TEXT, name TEXT)") diff --git a/docs/examples/usage/usage_drivers_and_querying_18.py b/docs/examples/usage/usage_drivers_and_querying_18.py index fd51f68fa..a1399d758 100644 --- a/docs/examples/usage/usage_drivers_and_querying_18.py +++ b/docs/examples/usage/usage_drivers_and_querying_18.py @@ -8,7 +8,7 @@ def test_example_18_named_parameters() -> None: spec = SQLSpec() - db = spec.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + db = 
spec.add_config(SqliteConfig(connection_config={"database": ":memory:"})) with spec.provide_session(db) as session: session.execute("CREATE TABLE logs (id INTEGER PRIMARY KEY, action TEXT, created_at TEXT)") diff --git a/docs/examples/usage/usage_drivers_and_querying_19.py b/docs/examples/usage/usage_drivers_and_querying_19.py index 01efc67ee..21d49a916 100644 --- a/docs/examples/usage/usage_drivers_and_querying_19.py +++ b/docs/examples/usage/usage_drivers_and_querying_19.py @@ -10,7 +10,7 @@ def test_example_19_type_coercion() -> None: spec = SQLSpec() - db = spec.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + db = spec.add_config(SqliteConfig(connection_config={"database": ":memory:"})) with spec.provide_session(db) as session: session.execute("CREATE TABLE events (id INTEGER PRIMARY KEY, is_active INTEGER, ts TEXT)") diff --git a/docs/examples/usage/usage_drivers_and_querying_2.py b/docs/examples/usage/usage_drivers_and_querying_2.py index 17577bb15..86aa6b2e2 100644 --- a/docs/examples/usage/usage_drivers_and_querying_2.py +++ b/docs/examples/usage/usage_drivers_and_querying_2.py @@ -18,7 +18,7 @@ async def test_example_2_importable(postgres_service: PostgresService) -> None: spec = SQLSpec() dsn = os.environ.get("SQLSPEC_USAGE_PG_DSN", "postgresql://localhost/test") - db = spec.add_config(AsyncpgConfig(pool_config={"dsn": dsn, "min_size": 10, "max_size": 20})) + db = spec.add_config(AsyncpgConfig(connection_config={"dsn": dsn, "min_size": 10, "max_size": 20})) async with spec.provide_session(db) as session: create_table_query = """ CREATE TABLE IF NOT EXISTS usage2_users ( diff --git a/docs/examples/usage/usage_drivers_and_querying_20.py b/docs/examples/usage/usage_drivers_and_querying_20.py index 64c447f32..037bc4cca 100644 --- a/docs/examples/usage/usage_drivers_and_querying_20.py +++ b/docs/examples/usage/usage_drivers_and_querying_20.py @@ -8,7 +8,7 @@ def test_example_20_script_execution() -> None: spec = SQLSpec() - db = spec.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + db = spec.add_config(SqliteConfig(connection_config={"database": ":memory:"})) with spec.provide_session(db) as session: # start-example diff --git a/docs/examples/usage/usage_drivers_and_querying_21.py b/docs/examples/usage/usage_drivers_and_querying_21.py index 157e52f9d..7609df097 100644 --- a/docs/examples/usage/usage_drivers_and_querying_21.py +++ b/docs/examples/usage/usage_drivers_and_querying_21.py @@ -8,15 +8,15 @@ pytestmark = pytest.mark.xdist_group("postgres") -__all__ = ("test_example_21_pool_config",) +__all__ = ("test_example_21_connection_config",) -def test_example_21_pool_config() -> None: +def test_example_21_connection_config() -> None: dsn = os.environ.get("SQLSPEC_USAGE_PG_DSN", "postgresql://localhost/test") # start-example - config = AsyncpgConfig(pool_config={"dsn": dsn, "min_size": 10, "max_size": 20}) + config = AsyncpgConfig(connection_config={"dsn": dsn, "min_size": 10, "max_size": 20}) # end-example - assert config.pool_config["min_size"] == 10 - assert config.pool_config["max_size"] == 20 + assert config.connection_config["min_size"] == 10 + assert config.connection_config["max_size"] == 20 diff --git a/docs/examples/usage/usage_drivers_and_querying_22.py b/docs/examples/usage/usage_drivers_and_querying_22.py index 02aa9f398..5b4689b06 100644 --- a/docs/examples/usage/usage_drivers_and_querying_22.py +++ b/docs/examples/usage/usage_drivers_and_querying_22.py @@ -8,7 +8,7 @@ def test_example_22_batch_operations() -> None: spec = SQLSpec() - db = 
spec.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + db = spec.add_config(SqliteConfig(connection_config={"database": ":memory:"})) with spec.provide_session(db) as session: session.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT)") diff --git a/docs/examples/usage/usage_drivers_and_querying_23.py b/docs/examples/usage/usage_drivers_and_querying_23.py index ad1038bfe..51b2271c3 100644 --- a/docs/examples/usage/usage_drivers_and_querying_23.py +++ b/docs/examples/usage/usage_drivers_and_querying_23.py @@ -8,7 +8,7 @@ def test_example_23_select_value() -> None: spec = SQLSpec() - db = spec.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + db = spec.add_config(SqliteConfig(connection_config={"database": ":memory:"})) with spec.provide_session(db) as session: session.execute("CREATE TABLE metrics (id INTEGER PRIMARY KEY, count INTEGER)") diff --git a/docs/examples/usage/usage_drivers_and_querying_24.py b/docs/examples/usage/usage_drivers_and_querying_24.py index f4f30dbf8..0c2d52d3e 100644 --- a/docs/examples/usage/usage_drivers_and_querying_24.py +++ b/docs/examples/usage/usage_drivers_and_querying_24.py @@ -8,7 +8,7 @@ def test_example_24_select_one_or_none() -> None: spec = SQLSpec() - db = spec.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + db = spec.add_config(SqliteConfig(connection_config={"database": ":memory:"})) with spec.provide_session(db) as session: session.execute("CREATE TABLE emails (id INTEGER PRIMARY KEY, address TEXT)") diff --git a/docs/examples/usage/usage_drivers_and_querying_25.py b/docs/examples/usage/usage_drivers_and_querying_25.py index 0dcf5602e..255e27c26 100644 --- a/docs/examples/usage/usage_drivers_and_querying_25.py +++ b/docs/examples/usage/usage_drivers_and_querying_25.py @@ -8,7 +8,7 @@ def test_example_25_sql_result_helpers() -> None: spec = SQLSpec() - db = spec.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + db = spec.add_config(SqliteConfig(connection_config={"database": ":memory:"})) with spec.provide_session(db) as session: session.execute("CREATE TABLE tasks (id INTEGER PRIMARY KEY, status TEXT)") diff --git a/docs/examples/usage/usage_drivers_and_querying_26.py b/docs/examples/usage/usage_drivers_and_querying_26.py index 606020a72..cb5e5de2c 100644 --- a/docs/examples/usage/usage_drivers_and_querying_26.py +++ b/docs/examples/usage/usage_drivers_and_querying_26.py @@ -8,7 +8,7 @@ def test_example_26_statement_stack() -> None: spec = SQLSpec() - db = spec.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + db = spec.add_config(SqliteConfig(connection_config={"database": ":memory:"})) stack = StatementStack() stack = stack.push_execute("CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT)") diff --git a/docs/examples/usage/usage_drivers_and_querying_27.py b/docs/examples/usage/usage_drivers_and_querying_27.py index 548d2e4bc..b4dd76f8e 100644 --- a/docs/examples/usage/usage_drivers_and_querying_27.py +++ b/docs/examples/usage/usage_drivers_and_querying_27.py @@ -8,7 +8,7 @@ def test_example_27_execute_many_dict() -> None: spec = SQLSpec() - db = spec.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + db = spec.add_config(SqliteConfig(connection_config={"database": ":memory:"})) with spec.provide_session(db) as session: session.execute("CREATE TABLE audit (id INTEGER PRIMARY KEY, action TEXT, user_id INTEGER)") diff --git a/docs/examples/usage/usage_drivers_and_querying_28.py b/docs/examples/usage/usage_drivers_and_querying_28.py index 
diff --git a/docs/examples/usage/usage_drivers_and_querying_28.py b/docs/examples/usage/usage_drivers_and_querying_28.py
index c767920e0..765029da6 100644
--- a/docs/examples/usage/usage_drivers_and_querying_28.py
+++ b/docs/examples/usage/usage_drivers_and_querying_28.py
@@ -16,7 +16,7 @@ class User:
 def test_example_28_schema_mapping() -> None:
     spec = SQLSpec()
-    db = spec.add_config(SqliteConfig(pool_config={"database": ":memory:"}))
+    db = spec.add_config(SqliteConfig(connection_config={"database": ":memory:"}))
 
     with spec.provide_session(db) as session:
         session.execute("CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT)")
diff --git a/docs/examples/usage/usage_drivers_and_querying_3.py b/docs/examples/usage/usage_drivers_and_querying_3.py
index b05d23b3f..d988dc6cd 100644
--- a/docs/examples/usage/usage_drivers_and_querying_3.py
+++ b/docs/examples/usage/usage_drivers_and_querying_3.py
@@ -20,7 +20,7 @@ def test_example_3_sync(postgres_service: PostgresService) -> None:
     dsn = os.environ.get("SQLSPEC_USAGE_PG_DSN", "postgresql://localhost/test")
 
     # Sync version
-    config = PsycopgSyncConfig(pool_config={"conninfo": dsn, "min_size": 5, "max_size": 10})
+    config = PsycopgSyncConfig(connection_config={"conninfo": dsn, "min_size": 5, "max_size": 10})
     db = spec.add_config(config)
 
     with spec.provide_session(db) as session:
diff --git a/docs/examples/usage/usage_drivers_and_querying_4.py b/docs/examples/usage/usage_drivers_and_querying_4.py
index 1f3fbb205..2482acb4d 100644
--- a/docs/examples/usage/usage_drivers_and_querying_4.py
+++ b/docs/examples/usage/usage_drivers_and_querying_4.py
@@ -20,7 +20,7 @@ async def test_example_4_async(postgres_service: PostgresService) -> None:
     dsn = os.environ.get("SQLSPEC_USAGE_PG_DSN", "postgresql://localhost/test")
 
     # Async version
-    config = PsycopgAsyncConfig(pool_config={"conninfo": dsn, "min_size": 5, "max_size": 10})
+    config = PsycopgAsyncConfig(connection_config={"conninfo": dsn, "min_size": 5, "max_size": 10})
     db = spec.add_config(config)
 
     async with spec.provide_session(db) as session:
diff --git a/docs/examples/usage/usage_drivers_and_querying_5.py b/docs/examples/usage/usage_drivers_and_querying_5.py
index eb7d25bbb..7de572d49 100644
--- a/docs/examples/usage/usage_drivers_and_querying_5.py
+++ b/docs/examples/usage/usage_drivers_and_querying_5.py
@@ -18,7 +18,7 @@ async def test_example_5_construct_config(postgres_service: PostgresService) ->
     # start-example
     spec = SQLSpec()
     dsn = os.environ.get("SQLSPEC_USAGE_PG_DSN", "postgresql://localhost/test")
-    config = PsqlpyConfig(pool_config={"dsn": dsn})
+    config = PsqlpyConfig(connection_config={"dsn": dsn})
     assert config is not None
     async with spec.provide_session(config) as session:
         create_table_query = """CREATE TABLE IF NOT EXISTS usage5_users (
diff --git a/docs/examples/usage/usage_drivers_and_querying_6.py b/docs/examples/usage/usage_drivers_and_querying_6.py
index 9d848115f..5a67f2d7f 100644
--- a/docs/examples/usage/usage_drivers_and_querying_6.py
+++ b/docs/examples/usage/usage_drivers_and_querying_6.py
@@ -20,7 +20,7 @@ def test_example_6_sqlite_config(tmp_path: Path) -> None:
     spec = SQLSpec()
     db = spec.add_config(
-        SqliteConfig(pool_config={"database": db_path, "timeout": 5.0, "check_same_thread": False})
+        SqliteConfig(connection_config={"database": db_path, "timeout": 5.0, "check_same_thread": False})
     )
 
     try:
diff --git a/docs/examples/usage/usage_drivers_and_querying_7.py b/docs/examples/usage/usage_drivers_and_querying_7.py
index f7bb561dc..f24c1bb58 100644
--- a/docs/examples/usage/usage_drivers_and_querying_7.py
+++ b/docs/examples/usage/usage_drivers_and_querying_7.py
@@ -8,6 +8,6 @@ def test_example_7_sync_sqlite() -> None:
     # start-example
     from sqlspec.adapters.sqlite import SqliteConfig
 
-    config = SqliteConfig(pool_config={"database": "myapp.db", "timeout": 5.0, "check_same_thread": False})
-    assert config.pool_config["database"] == "myapp.db"
+    config = SqliteConfig(connection_config={"database": "myapp.db", "timeout": 5.0, "check_same_thread": False})
+    assert config.connection_config["database"] == "myapp.db"
     # end-example
diff --git a/docs/examples/usage/usage_drivers_and_querying_8.py b/docs/examples/usage/usage_drivers_and_querying_8.py
index 64f2c2b7f..b20daee6b 100644
--- a/docs/examples/usage/usage_drivers_and_querying_8.py
+++ b/docs/examples/usage/usage_drivers_and_querying_8.py
@@ -13,7 +13,7 @@ async def test_example_8_aiosqlite_config(tmp_path: Path) -> None:
     from sqlspec.adapters.aiosqlite import AiosqliteConfig
 
     database_file = tmp_path / "myapp.db"
-    config = AiosqliteConfig(pool_config={"database": database_file})
+    config = AiosqliteConfig(connection_config={"database": database_file})
 
     spec = SQLSpec()
     async with spec.provide_session(config) as session:
diff --git a/docs/examples/usage/usage_drivers_and_querying_9.py b/docs/examples/usage/usage_drivers_and_querying_9.py
index 48c8b6bdb..dbe3fa0b8 100644
--- a/docs/examples/usage/usage_drivers_and_querying_9.py
+++ b/docs/examples/usage/usage_drivers_and_querying_9.py
@@ -17,7 +17,7 @@ async def test_example_9_asyncmy_config(mysql_service: MySQLService) -> None:
     spec = SQLSpec()
 
     config = AsyncmyConfig(
-        pool_config={
+        connection_config={
             "host": mysql_service.host,
             "port": mysql_service.port,
             "user": mysql_service.user,
diff --git a/docs/examples/usage/usage_migrations_1.py b/docs/examples/usage/usage_migrations_1.py
index 7ac7c9e1d..f2384d229 100644
--- a/docs/examples/usage/usage_migrations_1.py
+++ b/docs/examples/usage/usage_migrations_1.py
@@ -24,7 +24,7 @@ async def test_async_methods(postgres_service: PostgresService) -> None:
         f"@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
     )
     config = AsyncpgConfig(
-        pool_config={"dsn": dsn}, migration_config={"enabled": True, "script_location": str(migration_dir)}
+        connection_config={"dsn": dsn}, migration_config={"enabled": True, "script_location": str(migration_dir)}
     )
 
     # Initialize migrations directory (creates __init__.py if package=True)
diff --git a/docs/examples/usage/usage_migrations_10.py b/docs/examples/usage/usage_migrations_10.py
index 40741acce..659ba268d 100644
--- a/docs/examples/usage/usage_migrations_10.py
+++ b/docs/examples/usage/usage_migrations_10.py
@@ -28,7 +28,7 @@ async def test_tracker_instance(postgres_service: PostgresService) -> None:
         f"@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
     )
     config = AsyncpgConfig(
-        pool_config={"dsn": dsn},
+        connection_config={"dsn": dsn},
         migration_config={
             "enabled": True,
             "script_location": str(migration_dir),
diff --git a/docs/examples/usage/usage_migrations_2.py b/docs/examples/usage/usage_migrations_2.py
index 138fdaf7a..8d97bda43 100644
--- a/docs/examples/usage/usage_migrations_2.py
+++ b/docs/examples/usage/usage_migrations_2.py
@@ -16,7 +16,7 @@ def test_sync_methods() -> None:
     from sqlspec.adapters.sqlite import SqliteConfig
 
     config = SqliteConfig(
-        pool_config={"database": str(temp_db)},
+        connection_config={"database": str(temp_db)},
         migration_config={"enabled": True, "script_location": str(migration_dir)},
     )
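The migration examples keep `migration_config` untouched while renaming only the connection settings; a quick downstream smoke check for the rename, sketched under the assumption that the old `pool_config` attribute is removed outright rather than kept as a deprecated alias:

```python
from sqlspec.adapters.asyncpg import AsyncpgConfig

config = AsyncpgConfig(
    connection_config={"dsn": "postgresql://localhost/db"},  # renamed key
    migration_config={"enabled": True, "script_location": "migrations"},
)

assert hasattr(config, "connection_config")
assert not hasattr(config, "pool_config")  # assumption: no backward-compat alias
```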
diff --git a/docs/examples/usage/usage_migrations_4.py b/docs/examples/usage/usage_migrations_4.py
index 326e03a01..133904d2c 100644
--- a/docs/examples/usage/usage_migrations_4.py
+++ b/docs/examples/usage/usage_migrations_4.py
@@ -17,7 +17,7 @@ async def test_async_command_class_methods() -> None:
     from sqlspec.migrations.commands import AsyncMigrationCommands
 
     dsn = os.getenv("SQLSPEC_USAGE_PG_DSN", "postgresql://localhost/db")
-    config = AsyncpgConfig(pool_config={"dsn": dsn}, migration_config={"script_location": str(migration_dir)})
+    config = AsyncpgConfig(connection_config={"dsn": dsn}, migration_config={"script_location": str(migration_dir)})
 
     # Create commands instance
     commands = AsyncMigrationCommands(config)
diff --git a/docs/examples/usage/usage_migrations_5.py b/docs/examples/usage/usage_migrations_5.py
index 9b85602e4..e1ab0d16a 100644
--- a/docs/examples/usage/usage_migrations_5.py
+++ b/docs/examples/usage/usage_migrations_5.py
@@ -8,7 +8,7 @@
 # start-example
 dsn = os.getenv("SQLSPEC_USAGE_PG_DSN", "postgresql://localhost/db")
 config = AsyncpgConfig(
-    pool_config={"dsn": dsn},
+    connection_config={"dsn": dsn},
     migration_config={
         "enabled": True,
         "script_location": "migrations",
@@ -21,7 +21,7 @@ def test_config_structure() -> None:
     # Check config attributes
-    assert hasattr(config, "pool_config")
+    assert hasattr(config, "connection_config")
     assert hasattr(config, "migration_config")
     assert config.migration_config["enabled"] is True
     assert config.migration_config["script_location"] == "migrations"
diff --git a/docs/examples/usage/usage_migrations_9.py b/docs/examples/usage/usage_migrations_9.py
index 9d25fb196..4c67a05aa 100644
--- a/docs/examples/usage/usage_migrations_9.py
+++ b/docs/examples/usage/usage_migrations_9.py
@@ -9,7 +9,7 @@ def test_extension_config() -> None:
     # start-example
     dsn = os.getenv("SQLSPEC_USAGE_PG_DSN", "postgresql://localhost/db")
     config = AsyncpgConfig(
-        pool_config={"dsn": dsn},
+        connection_config={"dsn": dsn},
         migration_config={
             "enabled": True,
             "script_location": "migrations",
diff --git a/docs/examples/usage/usage_query_builder_1.py b/docs/examples/usage/usage_query_builder_1.py
index d3e3b2295..79dd23ac5 100644
--- a/docs/examples/usage/usage_query_builder_1.py
+++ b/docs/examples/usage/usage_query_builder_1.py
@@ -10,7 +10,7 @@ def test_example_1(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example1.db"  # Database file path
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,  # Lock timeout in seconds
             "check_same_thread": False,  # Allow multi-thread access
diff --git a/docs/examples/usage/usage_query_builder_10.py b/docs/examples/usage/usage_query_builder_10.py
index bab4ea646..217b29dd5 100644
--- a/docs/examples/usage/usage_query_builder_10.py
+++ b/docs/examples/usage/usage_query_builder_10.py
@@ -10,7 +10,7 @@ def test_example_10(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example10.db"  # Database file path
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,  # Lock timeout in seconds
             "check_same_thread": False,  # Allow multi-thread access
diff --git a/docs/examples/usage/usage_query_builder_11.py b/docs/examples/usage/usage_query_builder_11.py
index 884d05ed9..da6b0c731 100644
--- a/docs/examples/usage/usage_query_builder_11.py
+++ b/docs/examples/usage/usage_query_builder_11.py
@@ -10,7 +10,7 @@ def test_example_11(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example11.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_12.py b/docs/examples/usage/usage_query_builder_12.py
index c38d39ae2..ccf46e4e9 100644
--- a/docs/examples/usage/usage_query_builder_12.py
+++ b/docs/examples/usage/usage_query_builder_12.py
@@ -10,7 +10,7 @@ def test_example_12(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example12.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_13.py b/docs/examples/usage/usage_query_builder_13.py
index d986e9375..ef095067e 100644
--- a/docs/examples/usage/usage_query_builder_13.py
+++ b/docs/examples/usage/usage_query_builder_13.py
@@ -10,7 +10,7 @@ def test_example_13(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example13.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_14.py b/docs/examples/usage/usage_query_builder_14.py
index 887308ed7..4acb03c9b 100644
--- a/docs/examples/usage/usage_query_builder_14.py
+++ b/docs/examples/usage/usage_query_builder_14.py
@@ -11,7 +11,7 @@ def test_example_14(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example14.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_15.py b/docs/examples/usage/usage_query_builder_15.py
index f328fdb6a..407dc248a 100644
--- a/docs/examples/usage/usage_query_builder_15.py
+++ b/docs/examples/usage/usage_query_builder_15.py
@@ -10,7 +10,7 @@ def test_example_15(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example15.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_16.py b/docs/examples/usage/usage_query_builder_16.py
index c7cc165b0..67f4029c1 100644
--- a/docs/examples/usage/usage_query_builder_16.py
+++ b/docs/examples/usage/usage_query_builder_16.py
@@ -11,7 +11,7 @@ def test_example_16(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example16.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_17.py b/docs/examples/usage/usage_query_builder_17.py
index 69f16d2c2..862e40249 100644
--- a/docs/examples/usage/usage_query_builder_17.py
+++ b/docs/examples/usage/usage_query_builder_17.py
@@ -10,7 +10,7 @@ def test_example_17(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example17.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_18.py b/docs/examples/usage/usage_query_builder_18.py
index 1221ea1f3..889072428 100644
--- a/docs/examples/usage/usage_query_builder_18.py
+++ b/docs/examples/usage/usage_query_builder_18.py
@@ -10,7 +10,7 @@ def test_example_18(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example18.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_19.py b/docs/examples/usage/usage_query_builder_19.py
index 356321a9d..dc9210594 100644
--- a/docs/examples/usage/usage_query_builder_19.py
+++ b/docs/examples/usage/usage_query_builder_19.py
@@ -10,7 +10,7 @@ def test_example_19(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example19.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_2.py b/docs/examples/usage/usage_query_builder_2.py
index 4f32f9a26..2cf9b1a30 100644
--- a/docs/examples/usage/usage_query_builder_2.py
+++ b/docs/examples/usage/usage_query_builder_2.py
@@ -10,7 +10,7 @@ def test_example_2(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example2.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_20.py b/docs/examples/usage/usage_query_builder_20.py
index a93c3030c..aa6ccab5b 100644
--- a/docs/examples/usage/usage_query_builder_20.py
+++ b/docs/examples/usage/usage_query_builder_20.py
@@ -10,7 +10,7 @@ def test_example_20(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example20.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_21.py b/docs/examples/usage/usage_query_builder_21.py
index e4245c322..505eaa7ff 100644
--- a/docs/examples/usage/usage_query_builder_21.py
+++ b/docs/examples/usage/usage_query_builder_21.py
@@ -10,7 +10,7 @@ def test_example_21(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example21.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_22.py b/docs/examples/usage/usage_query_builder_22.py
index ca636c22d..60ac89dcf 100644
--- a/docs/examples/usage/usage_query_builder_22.py
+++ b/docs/examples/usage/usage_query_builder_22.py
@@ -10,7 +10,7 @@ def test_example_22(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example22.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_23.py b/docs/examples/usage/usage_query_builder_23.py
index 95b1cefe4..ef623a0e4 100644
--- a/docs/examples/usage/usage_query_builder_23.py
+++ b/docs/examples/usage/usage_query_builder_23.py
@@ -12,7 +12,7 @@ def test_example_23(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example23.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_24.py b/docs/examples/usage/usage_query_builder_24.py
index c8a3d96b3..ed542e026 100644
--- a/docs/examples/usage/usage_query_builder_24.py
+++ b/docs/examples/usage/usage_query_builder_24.py
@@ -14,7 +14,7 @@ def test_example_24(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example24.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_25.py b/docs/examples/usage/usage_query_builder_25.py
index 5d9ab3b61..9f9b1338f 100644
--- a/docs/examples/usage/usage_query_builder_25.py
+++ b/docs/examples/usage/usage_query_builder_25.py
@@ -10,7 +10,7 @@ def test_example_25(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example25.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_26.py b/docs/examples/usage/usage_query_builder_26.py
index bab13e1bf..d05342754 100644
--- a/docs/examples/usage/usage_query_builder_26.py
+++ b/docs/examples/usage/usage_query_builder_26.py
@@ -11,7 +11,7 @@ def test_example_26(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example26.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_27.py b/docs/examples/usage/usage_query_builder_27.py
index 088fbfdef..46fde2c88 100644
--- a/docs/examples/usage/usage_query_builder_27.py
+++ b/docs/examples/usage/usage_query_builder_27.py
@@ -10,7 +10,7 @@ def test_example_27(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example27.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_28.py b/docs/examples/usage/usage_query_builder_28.py
index b9933e5c2..c51786a28 100644
--- a/docs/examples/usage/usage_query_builder_28.py
+++ b/docs/examples/usage/usage_query_builder_28.py
@@ -12,7 +12,7 @@ def test_example_28(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example28.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_29.py b/docs/examples/usage/usage_query_builder_29.py
index bc1bcce5e..5b3ae48a6 100644
--- a/docs/examples/usage/usage_query_builder_29.py
+++ b/docs/examples/usage/usage_query_builder_29.py
@@ -10,7 +10,7 @@ def test_example_29(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example29.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_3.py b/docs/examples/usage/usage_query_builder_3.py
index f9bbfa992..24356ea8a 100644
--- a/docs/examples/usage/usage_query_builder_3.py
+++ b/docs/examples/usage/usage_query_builder_3.py
@@ -10,7 +10,7 @@ def test_example_3(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example3.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_30.py b/docs/examples/usage/usage_query_builder_30.py
index bbf80c737..1b4b236a3 100644
--- a/docs/examples/usage/usage_query_builder_30.py
+++ b/docs/examples/usage/usage_query_builder_30.py
@@ -10,7 +10,7 @@ def test_example_30(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example30.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_31.py b/docs/examples/usage/usage_query_builder_31.py
index b69885acd..975c4734c 100644
--- a/docs/examples/usage/usage_query_builder_31.py
+++ b/docs/examples/usage/usage_query_builder_31.py
@@ -10,7 +10,7 @@ def test_example_31(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example31.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_32.py b/docs/examples/usage/usage_query_builder_32.py
index deb4b2634..1e961d575 100644
--- a/docs/examples/usage/usage_query_builder_32.py
+++ b/docs/examples/usage/usage_query_builder_32.py
@@ -10,7 +10,7 @@ def test_example_32(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example32.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_4.py b/docs/examples/usage/usage_query_builder_4.py
index 7e1cdd7df..e61724ca8 100644
--- a/docs/examples/usage/usage_query_builder_4.py
+++ b/docs/examples/usage/usage_query_builder_4.py
@@ -11,7 +11,7 @@ def test_example_4(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example4.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_5.py b/docs/examples/usage/usage_query_builder_5.py
index f9fd8fa54..708233888 100644
--- a/docs/examples/usage/usage_query_builder_5.py
+++ b/docs/examples/usage/usage_query_builder_5.py
@@ -10,7 +10,7 @@ def test_example_5(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example5.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_6.py b/docs/examples/usage/usage_query_builder_6.py
index b7687c731..6d53ca0b4 100644
--- a/docs/examples/usage/usage_query_builder_6.py
+++ b/docs/examples/usage/usage_query_builder_6.py
@@ -11,7 +11,7 @@ def test_example_6(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example6.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_7.py b/docs/examples/usage/usage_query_builder_7.py
index 48989f086..6a9e24db2 100644
--- a/docs/examples/usage/usage_query_builder_7.py
+++ b/docs/examples/usage/usage_query_builder_7.py
@@ -10,7 +10,7 @@ def test_example_7(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example7.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_query_builder_8.py b/docs/examples/usage/usage_query_builder_8.py
index 54004dd3b..d0f025872 100644
--- a/docs/examples/usage/usage_query_builder_8.py
+++ b/docs/examples/usage/usage_query_builder_8.py
@@ -10,7 +10,7 @@ def test_example_8(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example8.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
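The query-builder hunks all repeat one identical SqliteConfig block; if a downstream suite wants the rename in exactly one place, it factors into a helper like this (`sqlite_example_config` is a hypothetical name, not part of SQLSpec):

```python
from pathlib import Path
from sqlspec.adapters.sqlite import SqliteConfig

def sqlite_example_config(db_file: Path) -> SqliteConfig:
    # Shared settings used by every query-builder example above.
    return SqliteConfig(
        connection_config={
            "database": db_file.name,
            "timeout": 5.0,              # lock timeout in seconds
            "check_same_thread": False,  # allow multi-thread access
        }
    )
```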
diff --git a/docs/examples/usage/usage_query_builder_9.py b/docs/examples/usage/usage_query_builder_9.py
index d8a322c51..1e9ab6cd9 100644
--- a/docs/examples/usage/usage_query_builder_9.py
+++ b/docs/examples/usage/usage_query_builder_9.py
@@ -10,7 +10,7 @@ def test_example_9(tmp_path: Path) -> None:
     db = SQLSpec()
     database = tmp_path / "example9.db"
     config = SqliteConfig(
-        pool_config={
+        connection_config={
             "database": database.name,
             "timeout": 5.0,
             "check_same_thread": False,
diff --git a/docs/examples/usage/usage_sql_files_11.py b/docs/examples/usage/usage_sql_files_11.py
index bcc5722e0..ecd7a856f 100644
--- a/docs/examples/usage/usage_sql_files_11.py
+++ b/docs/examples/usage/usage_sql_files_11.py
@@ -22,7 +22,7 @@ class User(BaseModel):
     query = loader.get_sql("get_user_by_id")
 
     spec = SQLSpec(loader=loader)
-    config = SqliteConfig(pool_config={"database": ":memory:"})
+    config = SqliteConfig(connection_config={"database": ":memory:"})
     with spec.provide_session(config) as session:
         session.execute("""CREATE TABLE users (
             id INTEGER PRIMARY KEY, username TEXT, email TEXT)""")
diff --git a/docs/examples/usage/usage_sql_files_14.py b/docs/examples/usage/usage_sql_files_14.py
index df16ff2b9..0197c8611 100644
--- a/docs/examples/usage/usage_sql_files_14.py
+++ b/docs/examples/usage/usage_sql_files_14.py
@@ -50,7 +50,7 @@ async def test_multi_database_setup_example(tmp_path: Path, postgres_service: Po
     spec = SQLSpec()
 
     postgres_config = AsyncpgConfig(
-        pool_config={
+        connection_config={
             "user": postgres_service.user,
             "password": postgres_service.password,
             "host": postgres_service.host,
diff --git a/docs/extensions/adk/adapters.rst b/docs/extensions/adk/adapters.rst
index c2f261ccc..e76b21ce2 100644
--- a/docs/extensions/adk/adapters.rst
+++ b/docs/extensions/adk/adapters.rst
@@ -58,7 +58,7 @@ AsyncPG (Recommended)
     from sqlspec.adapters.asyncpg import AsyncpgConfig
     from sqlspec.adapters.asyncpg.adk import AsyncpgADKStore
 
-    config = AsyncpgConfig(pool_config={
+    config = AsyncpgConfig(connection_config={
        "dsn": "postgresql://user:pass@localhost:5432/agentdb",
        "min_size": 10,
        "max_size": 20,
@@ -122,7 +122,7 @@ Psycopg
     from sqlspec.adapters.psycopg import PsycopgAsyncConfig
     from sqlspec.adapters.psycopg.adk import PsycopgADKStore
 
-    config = PsycopgAsyncConfig(pool_config={
+    config = PsycopgAsyncConfig(connection_config={
        "conninfo": "postgresql://user:pass@localhost/agentdb",
        "min_size": 5,
        "max_size": 20
@@ -155,7 +155,7 @@ Psqlpy
     from sqlspec.adapters.psqlpy import PsqlpyConfig
     from sqlspec.adapters.psqlpy.adk import PsqlpyADKStore
 
-    config = PsqlpyConfig(pool_config={
+    config = PsqlpyConfig(connection_config={
        "dsn": "postgresql://user:pass@localhost/agentdb",
        "max_db_pool_size": 20
     })
@@ -196,7 +196,7 @@ AsyncMy
     from sqlspec.adapters.asyncmy import AsyncmyConfig
     from sqlspec.adapters.asyncmy.adk import AsyncmyADKStore
 
-    config = AsyncmyConfig(pool_config={
+    config = AsyncmyConfig(connection_config={
        "host": "localhost",
        "port": 3306,
        "user": "agent_user",
@@ -272,7 +272,7 @@ SQLite (Sync)
     from sqlspec.adapters.sqlite import SqliteConfig
     from sqlspec.adapters.sqlite.adk import SqliteADKStore
 
-    config = SqliteConfig(pool_config={
+    config = SqliteConfig(connection_config={
        "database": "/path/to/agent.db",
        "check_same_thread": False  # Allow multi-threaded access
     })
@@ -327,7 +327,7 @@ AIOSqlite
     from sqlspec.adapters.aiosqlite import AiosqliteConfig
     from sqlspec.adapters.aiosqlite.adk import AiosqliteADKStore
 
-    config = AiosqliteConfig(pool_config={
+    config = AiosqliteConfig(connection_config={
        "database": "/path/to/agent.db"
     })
@@ -365,7 +365,7 @@ OracleDB
     from sqlspec.adapters.oracledb import OracleConfig
     from sqlspec.adapters.oracledb.adk import OracleADKStore
 
-    config = OracleConfig(pool_config={
+    config = OracleConfig(connection_config={
        "user": "agent_user",
        "password": "secure_password",
        "dsn": "localhost:1521/ORCLPDB1",
@@ -573,12 +573,12 @@ DuckDB
     from sqlspec.adapters.duckdb.adk import DuckdbADKStore
 
     # File-based database
-    config = DuckDBConfig(pool_config={
+    config = DuckDBConfig(connection_config={
        "database": "/path/to/sessions.duckdb"
     })
 
     # Or in-memory for testing
-    config = DuckDBConfig(pool_config={
+    config = DuckDBConfig(connection_config={
        "database": ":memory:"
     })
diff --git a/docs/extensions/adk/api.rst b/docs/extensions/adk/api.rst
index c42fc9204..e8672d59b 100644
--- a/docs/extensions/adk/api.rst
+++ b/docs/extensions/adk/api.rst
@@ -35,7 +35,7 @@ SQLSpecSessionService
     from sqlspec.adapters.asyncpg.adk import AsyncpgADKStore
     from sqlspec.extensions.adk import SQLSpecSessionService
 
-    config = AsyncpgConfig(pool_config={"dsn": "postgresql://..."})
+    config = AsyncpgConfig(connection_config={"dsn": "postgresql://..."})
     store = AsyncpgADKStore(config)
     await store.create_tables()
@@ -115,7 +115,7 @@ BaseAsyncADKStore
     from sqlspec.adapters.asyncpg import AsyncpgConfig
     from sqlspec.adapters.asyncpg.adk import AsyncpgADKStore
 
-    config = AsyncpgConfig(pool_config={"dsn": "postgresql://..."})
+    config = AsyncpgConfig(connection_config={"dsn": "postgresql://..."})
     store = AsyncpgADKStore(
         config,
         session_table="custom_sessions",
@@ -166,7 +166,7 @@ BaseSyncADKStore
     from sqlspec.adapters.sqlite import SqliteConfig
     from sqlspec.adapters.sqlite.adk import SqliteADKStore
 
-    config = SqliteConfig(pool_config={"database": "agent.db"})
+    config = SqliteConfig(connection_config={"database": "agent.db"})
     store = SqliteADKStore(config)
     store.create_tables()
diff --git a/docs/extensions/adk/backends/adbc.rst b/docs/extensions/adk/backends/adbc.rst
index 0993113c4..70b315887 100644
--- a/docs/extensions/adk/backends/adbc.rst
+++ b/docs/extensions/adk/backends/adbc.rst
@@ -372,7 +372,7 @@ ADBC uses standard SQL compatible with most databases. To migrate:
     from sqlspec.adapters.adbc.adk import AdbcADKStore
 
     # Source (AsyncPG)
-    source_config = AsyncpgConfig(pool_config={"dsn": "..."})
+    source_config = AsyncpgConfig(connection_config={"dsn": "..."})
     source_store = AsyncpgADKStore(source_config)
 
     # Destination (ADBC)
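The ADK hunks above all share one store lifecycle; a compact sketch of it with the renamed keyword, using only names that appear in these docs:

```python
import asyncio

from sqlspec.adapters.aiosqlite import AiosqliteConfig
from sqlspec.adapters.aiosqlite.adk import AiosqliteADKStore
from sqlspec.extensions.adk import SQLSpecSessionService

async def main() -> None:
    # connection_config replaces pool_config; everything downstream is unchanged
    config = AiosqliteConfig(connection_config={"database": ":memory:"})
    store = AiosqliteADKStore(config)
    await store.create_tables()
    service = SQLSpecSessionService(store)  # ready for session CRUD

asyncio.run(main())
```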
diff --git a/docs/extensions/adk/backends/aiosqlite.rst b/docs/extensions/adk/backends/aiosqlite.rst
index 0a5fa0cae..d27bb09c6 100644
--- a/docs/extensions/adk/backends/aiosqlite.rst
+++ b/docs/extensions/adk/backends/aiosqlite.rst
@@ -62,7 +62,7 @@ Basic Async File-Based Database
     async def main():
         # Create async file-based database
-        config = AiosqliteConfig(pool_config={"database": "./agent_sessions.db"})
+        config = AiosqliteConfig(connection_config={"database": "./agent_sessions.db"})
         store = AiosqliteADKStore(config)
         await store.create_tables()
@@ -90,7 +90,7 @@ Async In-Memory Database (Testing)
     async def test_setup():
         # Create async in-memory database
-        config = AiosqliteConfig(pool_config={"database": ":memory:"})
+        config = AiosqliteConfig(connection_config={"database": ":memory:"})
         store = AiosqliteADKStore(config)
         await store.create_tables()
@@ -116,7 +116,7 @@ Basic Configuration
     from sqlspec.adapters.aiosqlite import AiosqliteConfig
 
     config = AiosqliteConfig(
-        pool_config={
+        connection_config={
             "database": "/path/to/database.db",  # or ":memory:"
             "timeout": 5.0,  # Connection timeout
             "isolation_level": "DEFERRED",  # Transaction isolation
@@ -136,7 +136,7 @@ Connection Pooling
 .. code-block:: python
 
     config = AiosqliteConfig(
-        pool_config={
+        connection_config={
             "database": "./sessions.db",
             "pool_size": 5,  # Connection pool size
             "connect_timeout": 30.0,  # Pool acquire timeout
@@ -234,7 +234,7 @@ Async Context Managers
     from sqlspec.adapters.aiosqlite.adk import AiosqliteADKStore
 
     async def use_store():
-        config = AiosqliteConfig(pool_config={"database": ":memory:"})
+        config = AiosqliteConfig(connection_config={"database": ":memory:"})
 
         # Async context manager for connections
         async with config.provide_connection() as conn:
@@ -513,7 +513,7 @@ Database Locked Errors
     # 2. Increase timeout
     config = AiosqliteConfig(
-        pool_config={"database": "./db.sqlite", "timeout": 30.0}
+        connection_config={"database": "./db.sqlite", "timeout": 30.0}
     )
 
     # 3. Use transaction batching (reduce write frequency)
@@ -536,7 +536,7 @@ Ensure you're using ``asyncio.run()`` or managing the event loop properly:
     import asyncio
 
     async def main():
-        config = AiosqliteConfig(pool_config={"database": ":memory:"})
+        config = AiosqliteConfig(connection_config={"database": ":memory:"})
         store = AiosqliteADKStore(config)
         await store.create_tables()
@@ -559,7 +559,7 @@ Thread Safety Concerns
     # check_same_thread=False is safe with aiosqlite
     config = AiosqliteConfig(
-        pool_config={
+        connection_config={
             "database": "./db.sqlite",
             "check_same_thread": False  # Safe with aiosqlite
         }
@@ -609,7 +609,7 @@ Use Connection Pooling
 .. code-block:: python
 
     # Good: Reuse connection pool
-    config = AiosqliteConfig(pool_config={"database": "./db.sqlite", "pool_size": 5})
+    config = AiosqliteConfig(connection_config={"database": "./db.sqlite", "pool_size": 5})
     store = AiosqliteADKStore(config)
 
     # All operations use the pool
@@ -641,7 +641,7 @@ Graceful Cleanup
 .. code-block:: python
 
     async def application_lifecycle():
-        config = AiosqliteConfig(pool_config={"database": "./db.sqlite"})
+        config = AiosqliteConfig(connection_config={"database": "./db.sqlite"})
         store = AiosqliteADKStore(config)
         await store.create_tables()
@@ -673,7 +673,7 @@ Migrating from sync SQLite to AIOSQLite is straightforward:
     from sqlspec.adapters.aiosqlite.adk import AiosqliteADKStore
 
     async def async_app():
-        config = AiosqliteConfig(pool_config={"database": "./db.sqlite"})
+        config = AiosqliteConfig(connection_config={"database": "./db.sqlite"})
         store = AiosqliteADKStore(config)
         # ... async operations with await ...
@@ -681,7 +681,7 @@ Migrating from sync SQLite to AIOSQLite is straightforward:
 
 1. Import from ``aiosqlite`` instead of ``sqlite``
 2. Add ``async``/``await`` keywords
-3. Use ``pool_config`` parameter (not direct kwargs)
+3. Use ``connection_config`` parameter (not direct kwargs)
 4. Use ``asyncio.run()`` to execute
 
 API Reference
diff --git a/docs/extensions/adk/backends/asyncmy.rst b/docs/extensions/adk/backends/asyncmy.rst
index c46042216..ca860ee97 100644
--- a/docs/extensions/adk/backends/asyncmy.rst
+++ b/docs/extensions/adk/backends/asyncmy.rst
@@ -53,7 +53,7 @@ Basic Async Connection
     from sqlspec.extensions.adk import SQLSpecSessionService
 
     config = AsyncmyConfig(
-        pool_config={
+        connection_config={
             "host": "localhost",
             "port": 3306,
             "user": "myuser",
@@ -83,7 +83,7 @@ AsyncMy's built-in connection pool is production-ready:
 .. code-block:: python
 
     config = AsyncmyConfig(
-        pool_config={
+        connection_config={
             "host": "mysql.example.com",
             "port": 3306,
             "user": "agent_user",
@@ -182,7 +182,7 @@ Basic Configuration
     from sqlspec.adapters.asyncmy import AsyncmyConfig
 
     config = AsyncmyConfig(
-        pool_config={
+        connection_config={
             "host": "localhost",
             "port": 3306,
             "user": "myuser",
@@ -197,7 +197,7 @@ Advanced Configuration
 .. code-block:: python
 
     config = AsyncmyConfig(
-        pool_config={
+        connection_config={
             "host": "mysql-primary.example.com",
             "port": 3306,
             "user": "agent_app",
@@ -326,13 +326,13 @@ Connection Pool Tuning
 .. code-block:: python
 
     # Low traffic (< 100 concurrent users)
-    pool_config = {"minsize": 5, "maxsize": 20}
+    connection_config = {"minsize": 5, "maxsize": 20}
 
     # Medium traffic (100-1000 concurrent users)
-    pool_config = {"minsize": 20, "maxsize": 100}
+    connection_config = {"minsize": 20, "maxsize": 100}
 
     # High traffic (> 1000 concurrent users)
-    pool_config = {"minsize": 50, "maxsize": 200}
+    connection_config = {"minsize": 50, "maxsize": 200}
 
 **Connection Recycling:**
@@ -341,7 +341,7 @@ Prevent stale connections with ``pool_recycle``:
 .. code-block:: python
 
     config = AsyncmyConfig(
-        pool_config={
+        connection_config={
             "host": "mysql.example.com",
             "pool_recycle": 3600,  # Recycle after 1 hour
             # ...
@@ -390,7 +390,7 @@ MySQL 8.0+ removed query cache. Use connection pooling instead:
     # Proper connection pooling is more effective than query cache
     config = AsyncmyConfig(
-        pool_config={"minsize": 20, "maxsize": 100}
+        connection_config={"minsize": 20, "maxsize": 100}
     )
 
 Index Usage Verification
@@ -459,7 +459,7 @@ Always use ``utf8mb4`` for full Unicode support:
 .. code-block:: python
 
     config = AsyncmyConfig(
-        pool_config={
+        connection_config={
             "charset": "utf8mb4",  # NOT "utf8" (only 3 bytes)
             # ...
         }
@@ -478,7 +478,7 @@ Force UTC timezone for consistency:
 .. code-block:: python
 
     config = AsyncmyConfig(
-        pool_config={
+        connection_config={
             "init_command": "SET time_zone='+00:00'",
             # ...
         }
@@ -497,7 +497,7 @@ Enable SSL for production:
 .. code-block:: python
 
     config = AsyncmyConfig(
-        pool_config={
+        connection_config={
             "host": "mysql-prod.example.com",
             "ssl": {
                 "ca": "/etc/ssl/certs/ca-cert.pem",
@@ -526,7 +526,7 @@ AsyncMy excels in async web frameworks:
     @asynccontextmanager
     async def lifespan(app: FastAPI):
         # Startup
-        config = AsyncmyConfig(pool_config={...})
+        config = AsyncmyConfig(connection_config={...})
         await config.create_pool()
         yield
         # Shutdown
@@ -551,7 +551,7 @@ Connection pooling with tenant isolation:
     # Separate databases per tenant
     async def get_tenant_config(tenant_id: str) -> AsyncmyConfig:
         return AsyncmyConfig(
-            pool_config={
+            connection_config={
                 "host": "mysql.example.com",
                 "database": f"tenant_{tenant_id}",
                 "minsize": 5,
@@ -591,7 +591,7 @@ Leverage existing MySQL deployments:
     # Connect to existing MySQL instance
     config = AsyncmyConfig(
-        pool_config={
+        connection_config={
             "host": "existing-mysql.company.com",
             "port": 3306,
             "user": "agent_app",
@@ -637,7 +637,7 @@ Connection Pool Exhausted
     # Increase pool size
     config = AsyncmyConfig(
-        pool_config={
+        connection_config={
             "maxsize": 100,  # Increase from default
             # ...
         }
@@ -701,7 +701,7 @@ Connection Timeout Errors
 .. code-block:: python
 
     config = AsyncmyConfig(
-        pool_config={
+        connection_config={
             "host": "mysql.example.com",
             "connect_timeout": 30,  # Increase from default 10s
             # ...
@@ -720,7 +720,7 @@ UTF-8 Encoding Issues
 .. code-block:: python
 
     config = AsyncmyConfig(
-        pool_config={
+        connection_config={
             "charset": "utf8mb4",  # NOT "utf8"
             # ...
         }
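The AsyncMy hunks scatter the production knobs (utf8mb4, UTC, sizing, recycling) across several snippets; one consolidated sketch under the new keyword, with placeholder host and credentials:

```python
from sqlspec.adapters.asyncmy import AsyncmyConfig

# Draws every key from the snippets above; values are illustrative.
config = AsyncmyConfig(
    connection_config={
        "host": "mysql.example.com",               # placeholder
        "port": 3306,
        "user": "agent_app",                       # placeholder
        "password": "...",                         # placeholder
        "database": "agents",
        "charset": "utf8mb4",                      # full Unicode support
        "init_command": "SET time_zone='+00:00'",  # force UTC
        "minsize": 20,                             # medium-traffic tier
        "maxsize": 100,
        "pool_recycle": 3600,                      # recycle hourly
    }
)
```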
diff --git a/docs/extensions/adk/backends/asyncpg.rst b/docs/extensions/adk/backends/asyncpg.rst
index eb9907714..b5e8e78cd 100644
--- a/docs/extensions/adk/backends/asyncpg.rst
+++ b/docs/extensions/adk/backends/asyncpg.rst
@@ -78,7 +78,7 @@ Basic Configuration
     async def main():
         # Create configuration with connection pool
         config = AsyncpgConfig(
-            pool_config={
+            connection_config={
                 "dsn": "postgresql://user:password@localhost:5432/agentdb",
                 "min_size": 5,
                 "max_size": 20,
@@ -111,12 +111,12 @@ AsyncPG supports multiple connection string formats:
 .. code-block:: python
 
     # Full DSN
-    config = AsyncpgConfig(pool_config={
+    config = AsyncpgConfig(connection_config={
        "dsn": "postgresql://user:password@host:5432/database"
     })
 
     # Individual parameters
-    config = AsyncpgConfig(pool_config={
+    config = AsyncpgConfig(connection_config={
        "host": "localhost",
        "port": 5432,
        "user": "agent_user",
@@ -125,7 +125,7 @@ AsyncPG supports multiple connection string formats:
     })
 
     # With SSL
-    config = AsyncpgConfig(pool_config={
+    config = AsyncpgConfig(connection_config={
        "dsn": "postgresql://user:pass@host:5432/db?sslmode=require"
     })
@@ -142,7 +142,7 @@ AsyncPG's built-in connection pool is highly configurable:
     from sqlspec.adapters.asyncpg import AsyncpgConfig
 
     config = AsyncpgConfig(
-        pool_config={
+        connection_config={
             # Connection parameters
             "dsn": "postgresql://localhost/agentdb",
             "user": "agent_user",
@@ -306,7 +306,7 @@ Session Management
     from sqlspec.adapters.asyncpg.adk import AsyncpgADKStore
 
     async def session_example():
-        config = AsyncpgConfig(pool_config={"dsn": "postgresql://..."})
+        config = AsyncpgConfig(connection_config={"dsn": "postgresql://..."})
         store = AsyncpgADKStore(config)
         await store.create_tables()
@@ -354,7 +354,7 @@ Event Management
     from google.genai import types
 
     async def event_example():
-        config = AsyncpgConfig(pool_config={"dsn": "postgresql://..."})
+        config = AsyncpgConfig(connection_config={"dsn": "postgresql://..."})
         store = AsyncpgADKStore(config)
 
         # Create session first
@@ -421,7 +421,7 @@ Integration with SQLSpecSessionService
     from sqlspec.extensions.adk import SQLSpecSessionService
 
     async def service_example():
-        config = AsyncpgConfig(pool_config={"dsn": "postgresql://..."})
+        config = AsyncpgConfig(connection_config={"dsn": "postgresql://..."})
         store = AsyncpgADKStore(config)
         await store.create_tables()
@@ -487,7 +487,7 @@ Connection Pooling Best Practices
     # Create config and pool once at application startup
-    config = AsyncpgConfig(pool_config={
+    config = AsyncpgConfig(connection_config={
        "dsn": "postgresql://...",
        "min_size": 10,
        "max_size": 20
@@ -674,7 +674,7 @@ AsyncPG is ideal for async web frameworks:
     from sqlspec.adapters.asyncpg.adk import AsyncpgADKStore
 
     # Initialize at app startup
-    config = AsyncpgConfig(pool_config={"dsn": "postgresql://..."})
+    config = AsyncpgConfig(connection_config={"dsn": "postgresql://..."})
     store = AsyncpgADKStore(config)
 
     @get("/session/{session_id:str}")
@@ -694,7 +694,7 @@ Handle thousands of concurrent users:
 .. code-block:: python
 
-    config = AsyncpgConfig(pool_config={
+    config = AsyncpgConfig(connection_config={
        "dsn": "postgresql://...",
        "min_size": 20,
        "max_size": 50,
@@ -845,7 +845,7 @@ Connection Pool Exhausted
 .. code-block:: python
 
     # Increase pool size
-    config = AsyncpgConfig(pool_config={
+    config = AsyncpgConfig(connection_config={
        "max_size": 50,  # Increase from default 10
        "command_timeout": 30.0  # Prevent hung connections
     })
@@ -872,7 +872,7 @@ Connection Refused
     psql -h localhost -U postgres -d agentdb
 
     # Check connection parameters
-    config = AsyncpgConfig(pool_config={
+    config = AsyncpgConfig(connection_config={
        "host": "localhost",  # Correct host
        "port": 5432,  # Correct port
        "user": "postgres",  # Correct user
@@ -918,7 +918,7 @@ SSL Connection Issues
     import ssl
 
     # Require SSL
-    config = AsyncpgConfig(pool_config={
+    config = AsyncpgConfig(connection_config={
        "dsn": "postgresql://...",
        "ssl": "require"
     })
@@ -928,7 +928,7 @@ SSL Connection Issues
     ssl_context.check_hostname = False
     ssl_context.verify_mode = ssl.CERT_NONE
 
-    config = AsyncpgConfig(pool_config={
+    config = AsyncpgConfig(connection_config={
        "dsn": "postgresql://...",
        "ssl": ssl_context
     })
@@ -956,7 +956,7 @@ JSONB Type Codec Errors
         return json.dumps(obj)
 
     config = AsyncpgConfig(
-        pool_config={"dsn": "postgresql://..."},
+        connection_config={"dsn": "postgresql://..."},
         driver_features={
             "json_serializer": custom_json_serializer
         }
@@ -982,7 +982,7 @@ From SQLite to AsyncPG
     sessions = sqlite_store.list_sessions("app", "user")
 
     # Import to AsyncPG
-    pg_config = AsyncpgConfig(pool_config={"dsn": "postgresql://..."})
+    pg_config = AsyncpgConfig(connection_config={"dsn": "postgresql://..."})
     pg_store = AsyncpgADKStore(pg_config)
     await pg_store.create_tables()
@@ -1010,7 +1010,7 @@ Both use the same SQL schema, so migration is straightforward:
     from sqlspec.adapters.asyncpg.adk import AsyncpgADKStore
 
     # Just change the config class - SQL is identical
-    config = AsyncpgConfig(pool_config={
+    config = AsyncpgConfig(connection_config={
        "dsn": "postgresql://..."  # Same connection string
     })
diff --git a/docs/extensions/adk/backends/bigquery.rst b/docs/extensions/adk/backends/bigquery.rst
index 86541e168..684bd32da 100644
--- a/docs/extensions/adk/backends/bigquery.rst
+++ b/docs/extensions/adk/backends/bigquery.rst
@@ -258,7 +258,7 @@ Migrating from PostgreSQL/MySQL to BigQuery:
     from sqlspec.adapters.asyncpg import AsyncpgConfig
     from sqlspec.adapters.asyncpg.adk import AsyncpgADKStore
 
-    pg_config = AsyncpgConfig(pool_config={"dsn": "postgresql://..."})
+    pg_config = AsyncpgConfig(connection_config={"dsn": "postgresql://..."})
     pg_store = AsyncpgADKStore(pg_config)
 
     # Import to BigQuery
diff --git a/docs/extensions/adk/backends/duckdb.rst b/docs/extensions/adk/backends/duckdb.rst
index 1e07b723a..ec3f057d8 100644
--- a/docs/extensions/adk/backends/duckdb.rst
+++ b/docs/extensions/adk/backends/duckdb.rst
@@ -445,7 +445,7 @@ When your prototype becomes production, migrate to PostgreSQL:
     from sqlspec.adapters.asyncpg import AsyncpgConfig
     from sqlspec.adapters.asyncpg.adk import AsyncpgADKStore
 
-    pg_config = AsyncpgConfig(pool_config={"dsn": "postgresql://..."})
+    pg_config = AsyncpgConfig(connection_config={"dsn": "postgresql://..."})
     pg_store = AsyncpgADKStore(pg_config)
     await pg_store.create_tables()
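The DuckDB hunks show the file-based and in-memory configs side by side; a small sketch that picks between them from the environment (the `SESSIONS_DB_PATH` variable name is an assumption for illustration):

```python
import os

from sqlspec.adapters.duckdb import DuckDBConfig

# File-based database in production, :memory: for tests,
# mirroring the two DuckDB configs shown above.
database = os.environ.get("SESSIONS_DB_PATH", ":memory:")
config = DuckDBConfig(connection_config={"database": database})
```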
diff --git a/docs/extensions/adk/backends/oracledb.rst b/docs/extensions/adk/backends/oracledb.rst
index 1869ca387..fd83efde5 100644
--- a/docs/extensions/adk/backends/oracledb.rst
+++ b/docs/extensions/adk/backends/oracledb.rst
@@ -96,7 +96,7 @@ Async Store (Recommended)
     # Configure Oracle connection
     config = OracleAsyncConfig(
-        pool_config={
+        connection_config={
             "user": "agent_user",
             "password": "secure_password",
             "dsn": "oracle.example.com:1521/XEPDB1",
@@ -129,7 +129,7 @@ Sync Store
     # Configure Oracle connection
     config = OracleSyncConfig(
-        pool_config={
+        connection_config={
             "user": "agent_user",
             "password": "secure_password",
             "dsn": "oracle.example.com:1521/XEPDB1",
@@ -163,7 +163,7 @@ Oracle supports multiple DSN (Data Source Name) formats:
     config = OracleAsyncConfig(
-        pool_config={
+        connection_config={
             "user": "agent_user",
             "password": "secure_password",
             "dsn": "hostname:1521/service_name",
@@ -175,7 +175,7 @@ Oracle supports multiple DSN (Data Source Name) formats:
     config = OracleAsyncConfig(
-        pool_config={
+        connection_config={
             "user": "agent_user",
             "password": "secure_password",
             "dsn": "tcps://hostname:2484/service_name?ssl_server_cert_dn=CN=server",
@@ -187,7 +187,7 @@ Oracle supports multiple DSN (Data Source Name) formats:
     config = OracleAsyncConfig(
-        pool_config={
+        connection_config={
             "user": "agent_user",
             "password": "secure_password",
             "dsn": """(DESCRIPTION=
@@ -201,7 +201,7 @@ Oracle supports multiple DSN (Data Source Name) formats:
     config = OracleAsyncConfig(
-        pool_config={
+        connection_config={
             "user": "agent_user",
             "password": "secure_password",
             "dsn": "PROD_DB",  # Name from tnsnames.ora
@@ -216,7 +216,7 @@ Oracle connection pooling is **mandatory** for production:
     config = OracleAsyncConfig(
-        pool_config={
+        connection_config={
             "user": "agent_user",
             "password": "secure_password",
             "dsn": "oracle.example.com:1521/XEPDB1",
@@ -244,7 +244,7 @@ Configure custom table names via ``extension_config``:
 .. code-block:: python
 
     config = OracleAsyncConfig(
-        pool_config={"dsn": "oracle://..."},
+        connection_config={"dsn": "oracle://..."},
         extension_config={
             "adk": {
                 "session_table": "agent_sessions",
@@ -280,7 +280,7 @@ Enable In-Memory via ``extension_config``:
     from sqlspec.adapters.oracledb.adk import OracleAsyncADKStore
 
     config = OracleAsyncConfig(
-        pool_config={
+        connection_config={
             "user": "agent_user",
             "password": "secure_password",
             "dsn": "oracle.example.com:1521/XEPDB1",
@@ -847,7 +847,7 @@ Security Best Practices
     import os
 
     config = OracleAsyncConfig(
-        pool_config={
+        connection_config={
             "user": os.environ["ORACLE_USER"],
             "password": os.environ["ORACLE_PASSWORD"],
             "dsn": os.environ["ORACLE_DSN"],
@@ -857,7 +857,7 @@ Security Best Practices
     # 2. Use Oracle Wallet (thick mode)
     oracledb.init_oracle_client()
     config = OracleAsyncConfig(
-        pool_config={
+        connection_config={
             "dsn": "wallet_alias",
             # No user/password needed - from wallet
         }
@@ -865,7 +865,7 @@ Security Best Practices
     # 3. Limit connection pool size
     config = OracleAsyncConfig(
-        pool_config={
+        connection_config={
             "max": 10,  # Prevent resource exhaustion
         }
     )
@@ -911,7 +911,7 @@ Enterprise AI Agent Platform
     # Production configuration
     config = OracleAsyncConfig(
-        pool_config={
+        connection_config={
             "user": os.environ["ORACLE_USER"],
             "password": os.environ["ORACLE_PASSWORD"],
             "dsn": "prod-oracle.example.com:1521/PROD",
@@ -966,7 +966,7 @@ High-Availability Setup
     # Oracle RAC (Real Application Clusters)
     config = OracleAsyncConfig(
-        pool_config={
+        connection_config={
             "user": "agent_user",
             "password": "secure_password",
             "dsn": """(DESCRIPTION=
diff --git a/docs/extensions/adk/backends/psqlpy.rst b/docs/extensions/adk/backends/psqlpy.rst
index 1ab429c4c..2a6459afc 100644
--- a/docs/extensions/adk/backends/psqlpy.rst
+++ b/docs/extensions/adk/backends/psqlpy.rst
@@ -54,7 +54,7 @@ Basic Setup
     # Create configuration with connection pool
     config = PsqlpyConfig(
-        pool_config={
+        connection_config={
             "dsn": "postgresql://user:password@localhost:5432/mydb",
             "max_db_pool_size": 10,
         }
@@ -159,7 +159,7 @@ Basic Configuration
     from sqlspec.adapters.psqlpy import PsqlpyConfig
 
     config = PsqlpyConfig(
-        pool_config={
+        connection_config={
             "dsn": "postgresql://user:password@localhost:5432/mydb",
             "max_db_pool_size": 20,
         }
@@ -171,7 +171,7 @@ Advanced Connection Pooling
 .. code-block:: python
 
     config = PsqlpyConfig(
-        pool_config={
+        connection_config={
             "host": "localhost",
             "port": 5432,
             "username": "user",
@@ -192,7 +192,7 @@ SSL Configuration
 .. code-block:: python
 
     config = PsqlpyConfig(
-        pool_config={
+        connection_config={
             "dsn": "postgresql://user:password@localhost:5432/mydb",
             "ssl_mode": "require",
             "sslrootcert": "/path/to/ca.crt",
@@ -328,7 +328,7 @@ Optimize pool size for your workload:
     # For high-concurrency workloads
     config = PsqlpyConfig(
-        pool_config={
+        connection_config={
             "dsn": "postgresql://...",
             "max_db_pool_size": 100,  # Large pool for many concurrent users
         }
@@ -336,7 +336,7 @@ Optimize pool size for your workload:
     # For low-latency workloads
     config = PsqlpyConfig(
-        pool_config={
+        connection_config={
             "dsn": "postgresql://...",
             "max_db_pool_size": 20,  # Smaller pool, faster checkout
             "connect_timeout_sec": 5,  # Fail fast
@@ -500,7 +500,7 @@ High-Performance Agent API
     # High-performance configuration
     config = PsqlpyConfig(
-        pool_config={
+        connection_config={
             "dsn": "postgresql://localhost:5432/agents",
             "max_db_pool_size": 100,
             "connect_timeout_sec": 5,
@@ -556,7 +556,7 @@ Rust Microservices Integration
     # from pydantic_core import ValidationError  # Rust-based validation
 
     config = PsqlpyConfig(
-        pool_config={
+        connection_config={
             "dsn": "postgresql://localhost:5432/microservices"
         }
     )
@@ -610,7 +610,7 @@ Connection Pool Exhaustion
     # Increase pool size
     config = PsqlpyConfig(
-        pool_config={
+        connection_config={
             "dsn": "postgresql://...",
             "max_db_pool_size": 50,  # Increase from default
         }
diff --git a/docs/extensions/adk/backends/psycopg.rst b/docs/extensions/adk/backends/psycopg.rst
index ac580ba4d..e23632716 100644
--- a/docs/extensions/adk/backends/psycopg.rst
+++ b/docs/extensions/adk/backends/psycopg.rst
@@ -83,7 +83,7 @@ Async Usage (Recommended)
     # Create async config with connection pool
     config = PsycopgAsyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": "postgresql://user:pass@localhost/db",
             "min_size": 5,
             "max_size": 20,
@@ -115,7 +115,7 @@ Sync Usage
     # Create sync config with connection pool
     config = PsycopgSyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": "postgresql://user:pass@localhost/db",
             "min_size": 5,
             "max_size": 20,
@@ -147,7 +147,7 @@ Basic Async Configuration
     from sqlspec.adapters.psycopg import PsycopgAsyncConfig
 
     config = PsycopgAsyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": "postgresql://user:pass@localhost:5432/dbname",
             "min_size": 5,   # Minimum pool connections
             "max_size": 20,  # Maximum pool connections
@@ -165,7 +165,7 @@ Basic Sync Configuration
     from sqlspec.adapters.psycopg import PsycopgSyncConfig
 
     config = PsycopgSyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": "postgresql://user:pass@localhost:5432/dbname",
             "min_size": 5,
             "max_size": 20,
@@ -178,7 +178,7 @@ Advanced Configuration
 .. code-block:: python
 
     config = PsycopgAsyncConfig(
-        pool_config={
+        connection_config={
             # Connection string
             "conninfo": "postgresql://user:pass@localhost/db?sslmode=require",
@@ -479,7 +479,7 @@ Psycopg3 has built-in connection pooling via ``psycopg_pool``:
 .. code-block:: python
 
     config = PsycopgAsyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": "postgresql://...",
             "min_size": 5,   # Pre-create 5 connections
             "max_size": 20,  # Allow up to 20 connections
@@ -596,7 +596,7 @@ Connection Pool Sizing
     # For web applications
     config = PsycopgAsyncConfig(
-        pool_config={
+        connection_config={
             "min_size": 10,  # Match expected concurrent requests
             "max_size": 50,  # 2-3x min_size for burst traffic
             "max_lifetime": 3600.0,  # Recycle hourly
@@ -605,7 +605,7 @@ Connection Pool Sizing
     # For background workers
     config = PsycopgAsyncConfig(
-        pool_config={
+        connection_config={
             "min_size": 2,
             "max_size": 10,
         }
@@ -625,7 +625,7 @@ Async Web Application
     from sqlspec.extensions.adk import SQLSpecSessionService
 
     config = PsycopgAsyncConfig(
-        pool_config={"conninfo": "postgresql://..."}
+        connection_config={"conninfo": "postgresql://..."}
     )
     store = PsycopgAsyncADKStore(config)
     service = SQLSpecSessionService(store)
@@ -647,7 +647,7 @@ Sync Background Worker
     from sqlspec.extensions.adk import SQLSpecSessionService
 
     config = PsycopgSyncConfig(
-        pool_config={"conninfo": "postgresql://..."}
+        connection_config={"conninfo": "postgresql://..."}
     )
     store = PsycopgSyncADKStore(config)
     service = SQLSpecSessionService(store)
@@ -666,13 +666,13 @@ Mixed Async/Sync Application
     # Async config for web API
     async_config = PsycopgAsyncConfig(
-        pool_config={"conninfo": "postgresql://..."}
+        connection_config={"conninfo": "postgresql://..."}
     )
     async_store = PsycopgAsyncADKStore(async_config)
 
     # Sync config for CLI tools (separate pool)
     sync_config = PsycopgSyncConfig(
-        pool_config={"conninfo": "postgresql://..."}
+        connection_config={"conninfo": "postgresql://..."}
     )
     sync_store = PsycopgSyncADKStore(sync_config)
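The psycopg sizing hunks state a rule of thumb (min_size near expected concurrent requests, max_size two to three times that); a small sketch that encodes it, where `web_pool_config` is a hypothetical helper rather than SQLSpec API:

```python
from sqlspec.adapters.psycopg import PsycopgAsyncConfig

def web_pool_config(expected_concurrency: int) -> PsycopgAsyncConfig:
    # min_size ~= expected concurrent requests; max_size ~= 3x for bursts.
    return PsycopgAsyncConfig(
        connection_config={
            "conninfo": "postgresql://...",  # placeholder
            "min_size": expected_concurrency,
            "max_size": expected_concurrency * 3,
            "max_lifetime": 3600.0,  # recycle hourly
        }
    )
```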
SqliteConfig(pool_config={"database": "/tmp/sessions.db"}) + config = SqliteConfig(connection_config={"database": "/tmp/sessions.db"}) 5. Backup Strategy ------------------ @@ -456,7 +456,7 @@ SQLite's zero-configuration makes it perfect for rapid development: .. code-block:: python # Quick setup - no database server needed! - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) store = SqliteADKStore(config) await store.create_tables() @@ -476,7 +476,7 @@ Store agent sessions locally in desktop apps: app_data = Path.home() / ".my_agent" / "sessions.db" app_data.parent.mkdir(parents=True, exist_ok=True) - config = SqliteConfig(pool_config={"database": str(app_data)}) + config = SqliteConfig(connection_config={"database": str(app_data)}) store = SqliteADKStore(config) await store.create_tables() @@ -498,7 +498,7 @@ In-memory databases for fast, isolated tests: @pytest.fixture async def test_store(): """Provide fresh in-memory store for each test.""" - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) store = SqliteADKStore(config) await store.create_tables() yield store @@ -536,7 +536,7 @@ Database Locked Errors .. code-block:: python - config = SqliteConfig(pool_config={ + config = SqliteConfig(connection_config={ "database": "./agent.db", "timeout": 30.0 # Wait up to 30 seconds for locks }) @@ -574,7 +574,7 @@ File Permission Errors db_path = Path("./data/agent.db") db_path.parent.mkdir(parents=True, exist_ok=True) - config = SqliteConfig(pool_config={"database": str(db_path)}) + config = SqliteConfig(connection_config={"database": str(db_path)}) 2. **Check write permissions**: @@ -616,7 +616,7 @@ When ready for production, migrate from SQLite to PostgreSQL: from sqlspec.adapters.sqlite import SqliteConfig from sqlspec.adapters.sqlite.adk import SqliteADKStore - sqlite_config = SqliteConfig(pool_config={"database": "./dev.db"}) + sqlite_config = SqliteConfig(connection_config={"database": "./dev.db"}) sqlite_store = SqliteADKStore(sqlite_config) # Get all sessions @@ -626,7 +626,7 @@ When ready for production, migrate from SQLite to PostgreSQL: from sqlspec.adapters.asyncpg import AsyncpgConfig from sqlspec.adapters.asyncpg.adk import AsyncpgADKStore - pg_config = AsyncpgConfig(pool_config={"dsn": "postgresql://..."}) + pg_config = AsyncpgConfig(connection_config={"dsn": "postgresql://..."}) pg_store = AsyncpgADKStore(pg_config) await pg_store.create_tables() @@ -678,7 +678,7 @@ Complete runnable example demonstrating SQLite ADK integration: """Demonstrate SQLite ADK session storage.""" # File-based database db_path = Path("./agent_sessions.db") - config = SqliteConfig(pool_config={"database": str(db_path)}) + config = SqliteConfig(connection_config={"database": str(db_path)}) store = SqliteADKStore(config) await store.create_tables() diff --git a/docs/extensions/adk/migrations.rst b/docs/extensions/adk/migrations.rst index 236c1ce56..73a2b7b3a 100644 --- a/docs/extensions/adk/migrations.rst +++ b/docs/extensions/adk/migrations.rst @@ -22,7 +22,7 @@ The simplest approach for development and small deployments: from sqlspec.adapters.asyncpg import AsyncpgConfig from sqlspec.adapters.asyncpg.adk import AsyncpgADKStore - config = AsyncpgConfig(pool_config={"dsn": "postgresql://..."}) + config = AsyncpgConfig(connection_config={"dsn": "postgresql://..."}) store = AsyncpgADKStore(config) # Create tables if they don't exist @@ -50,7 +50,7 @@ 
Setting Up Migrations from sqlspec.adapters.asyncpg import AsyncpgConfig config = AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, extension_config={ "adk": { "session_table": "adk_sessions", @@ -210,7 +210,7 @@ Configure custom table names via ``extension_config``: .. code-block:: python config = AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, extension_config={ "adk": { "session_table": "my_custom_sessions", @@ -256,7 +256,7 @@ To include an owner ID column in your ADK tables, configure it in ``extension_con from sqlspec.adapters.asyncpg import AsyncpgConfig config = AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, extension_config={ "adk": { "session_table": "adk_sessions", @@ -367,7 +367,7 @@ For multi-tenant applications, create separate migrations per tenant: # Tenant A config config_a = AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, extension_config={ "adk": { "session_table": "tenant_a_sessions", @@ -378,7 +378,7 @@ For multi-tenant applications, create separate migrations per tenant: # Tenant B config config_b = AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, extension_config={ "adk": { "session_table": "tenant_b_sessions", @@ -392,7 +392,7 @@ Or use a single database with schema separation (PostgreSQL): .. code-block:: python config_a = AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, extension_config={ "adk": { "session_table": "tenant_a.sessions", @@ -573,7 +573,7 @@ Test migrations in a staging environment: async def migration_config(): """Test database configuration.""" return AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/test_db"} + connection_config={"dsn": "postgresql://localhost/test_db"} ) diff --git a/docs/extensions/adk/quickstart.rst b/docs/extensions/adk/quickstart.rst index 2d48faf8e..318d475f4 100644 --- a/docs/extensions/adk/quickstart.rst +++ b/docs/extensions/adk/quickstart.rst @@ -47,7 +47,7 @@ Create a database configuration. This example uses PostgreSQL with AsyncPG: ..
code-block:: python - config = AsyncpgConfig(pool_config={ + config = AsyncpgConfig(connection_config={ "dsn": "postgresql://user:password@localhost:5432/mydb", "min_size": 5, "max_size": 20 @@ -64,7 +64,7 @@ For local development with SQLite: from sqlspec.adapters.aiosqlite import AiosqliteConfig from sqlspec.adapters.aiosqlite.adk import AiosqliteADKStore - config = AiosqliteConfig(pool_config={ + config = AiosqliteConfig(connection_config={ "database": "./my_agent.db" }) @@ -313,7 +313,7 @@ The ``owner_id_column`` parameter accepts a full column DDL definition: from sqlspec.adapters.asyncpg import AsyncpgConfig from sqlspec.adapters.asyncpg.adk import AsyncpgADKStore - config = AsyncpgConfig(pool_config={ + config = AsyncpgConfig(connection_config={ "dsn": "postgresql://user:password@localhost:5432/mydb" }) @@ -438,7 +438,7 @@ Complete example linking sessions to tenants: from sqlspec.adapters.duckdb import DuckDBConfig from sqlspec.adapters.duckdb.adk import DuckdbADKStore - config = DuckDBConfig(pool_config={"database": "multi_tenant.ddb"}) + config = DuckDBConfig(connection_config={"database": "multi_tenant.ddb"}) # Create tenants table with config.provide_connection() as conn: @@ -559,7 +559,7 @@ For migrations and programmatic configuration, use ``extension_config``: from sqlspec.adapters.asyncpg import AsyncpgConfig config = AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, extension_config={ "adk": { "session_table": "adk_sessions", diff --git a/docs/extensions/aiosql/index.rst b/docs/extensions/aiosql/index.rst index c4c8220f5..2bf36d4c0 100644 --- a/docs/extensions/aiosql/index.rst +++ b/docs/extensions/aiosql/index.rst @@ -115,7 +115,7 @@ SQLFileLoader (Built-in) # Set up database spec = SQLSpec() config = spec.add_config( - AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/mydb"}) + AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/mydb"}) ) # Load SQL files @@ -159,7 +159,7 @@ aiosql Adapter (Compatibility) # Set up database spec = SQLSpec() config = spec.add_config( - AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/mydb"}) + AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/mydb"}) ) # Create adapter and load queries diff --git a/docs/extensions/aiosql/migration.rst b/docs/extensions/aiosql/migration.rst index 967985f5b..ca4658d5f 100644 --- a/docs/extensions/aiosql/migration.rst +++ b/docs/extensions/aiosql/migration.rst @@ -178,7 +178,7 @@ Async Usage spec = SQLSpec() config = spec.add_config( - AsyncpgConfig(pool_config={"dsn": "postgresql://..."}) + AsyncpgConfig(connection_config={"dsn": "postgresql://..."}) ) spec.load_sql_files("queries.sql") @@ -227,7 +227,7 @@ You can use both SQLFileLoader and the aiosql adapter in the same project: spec = SQLSpec() config = spec.add_config( - AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/mydb"}) + AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/mydb"}) ) # Load some SQL files with SQLFileLoader diff --git a/docs/extensions/aiosql/quickstart.rst b/docs/extensions/aiosql/quickstart.rst index 0d18076dc..18e784834 100644 --- a/docs/extensions/aiosql/quickstart.rst +++ b/docs/extensions/aiosql/quickstart.rst @@ -82,7 +82,7 @@ Step 3: Execute Queries # Set up database (continue from Step 2) config = spec.add_config( - AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/mydb"}) + AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/mydb"}) ) # Execute queries @@ -150,7 +150,7 @@ Complete 
SQLFileLoader Example # Set up database spec = SQLSpec() config = spec.add_config( - AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/mydb"}) + AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/mydb"}) ) # Load SQL files @@ -223,7 +223,7 @@ Step 2: Use with aiosql Adapter # Set up SQLSpec spec = SQLSpec() config = spec.add_config( - AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/mydb"}) + AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/mydb"}) ) async with spec.provide_driver(config) as driver: diff --git a/docs/extensions/litestar/api.rst b/docs/extensions/litestar/api.rst index eb799a01d..577fb0779 100644 --- a/docs/extensions/litestar/api.rst +++ b/docs/extensions/litestar/api.rst @@ -26,7 +26,7 @@ Configure the plugin via ``extension_config`` in database configuration: from sqlspec.adapters.asyncpg import AsyncpgConfig config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={ "litestar": { "connection_key": "db_connection", @@ -199,7 +199,7 @@ Basic Plugin Setup spec = SQLSpec() db = spec.add_config( - AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/mydb"}) + AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/mydb"}) ) plugin = SQLSpecPlugin(sqlspec=spec) @@ -220,7 +220,7 @@ Multi-Database Setup primary = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/primary"}, + connection_config={"dsn": "postgresql://localhost/primary"}, extension_config={ "litestar": {"session_key": "primary_session"} } @@ -256,7 +256,7 @@ Session Store Setup # Add database configuration config = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={"litestar": {"session_table": "litestar_sessions"}}, ) ) diff --git a/docs/extensions/litestar/dependency_injection.rst b/docs/extensions/litestar/dependency_injection.rst index 6a2d7a167..4e4ec34c4 100644 --- a/docs/extensions/litestar/dependency_injection.rst +++ b/docs/extensions/litestar/dependency_injection.rst @@ -109,7 +109,7 @@ For multi-database setups, use custom dependency keys: # Primary database primary = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/primary"}, + connection_config={"dsn": "postgresql://localhost/primary"}, extension_config={ "litestar": {"session_key": "primary_session"} } @@ -119,7 +119,7 @@ For multi-database setups, use custom dependency keys: # Analytics database analytics = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/analytics"}, + connection_config={"dsn": "postgresql://localhost/analytics"}, extension_config={ "litestar": {"session_key": "analytics_session"} } @@ -145,7 +145,7 @@ Customize dependency keys via ``extension_config``: from sqlspec.adapters.asyncpg import AsyncpgConfig config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={ "litestar": { "connection_key": "db_connection", # Raw connection key @@ -171,7 +171,7 @@ Configure multiple databases with unique dependency keys: # Primary PostgreSQL database primary = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/app"}, + connection_config={"dsn": "postgresql://localhost/app"}, extension_config={ "litestar": { "connection_key": "primary_connection", diff --git 
a/docs/extensions/litestar/index.rst b/docs/extensions/litestar/index.rst index 6f4907841..efb7ea98d 100644 --- a/docs/extensions/litestar/index.rst +++ b/docs/extensions/litestar/index.rst @@ -86,7 +86,7 @@ Here's a simple example of creating a Litestar application with SQLSpec integrat # 2. Configure database db = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={ "litestar": {"commit_mode": "autocommit"} } @@ -198,7 +198,7 @@ Connect to multiple databases with unique dependency keys: # Primary application database primary_db = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/app"}, + connection_config={"dsn": "postgresql://localhost/app"}, extension_config={ "litestar": {"session_key": "primary_session"} } @@ -247,7 +247,7 @@ Store user sessions in the database: # 2. Add database configuration db = spec.add_config( - AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/mydb"}) + AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/mydb"}) ) # 3. Create session store backed by PostgreSQL diff --git a/docs/extensions/litestar/quickstart.rst b/docs/extensions/litestar/quickstart.rst index 8cc465eae..15453605a 100644 --- a/docs/extensions/litestar/quickstart.rst +++ b/docs/extensions/litestar/quickstart.rst @@ -47,7 +47,7 @@ Create a SQLSpec instance and add a database configuration: spec = SQLSpec() db = spec.add_config( AsyncpgConfig( - pool_config={ + connection_config={ "dsn": "postgresql://user:password@localhost:5432/mydb", "min_size": 5, "max_size": 20 @@ -72,7 +72,7 @@ For local development with SQLite: db = spec.add_config( AiosqliteConfig( - pool_config={"database": "./myapp.db"}, + connection_config={"database": "./myapp.db"}, extension_config={ "litestar": {"commit_mode": "autocommit"} } @@ -167,7 +167,7 @@ Here's a complete working example: spec = SQLSpec() db = spec.add_config( AsyncpgConfig( - pool_config={ + connection_config={ "dsn": "postgresql://user:password@localhost:5432/mydb", "min_size": 5, "max_size": 20 @@ -303,7 +303,7 @@ Explicit transaction control (default): db = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, extension_config={"litestar": {"commit_mode": "manual"}} ) ) @@ -329,7 +329,7 @@ Automatic commit on 2XX responses (recommended): db = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, extension_config={"litestar": {"commit_mode": "autocommit"}} ) ) @@ -356,7 +356,7 @@ Commits on both 2XX and 3XX responses: db = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, extension_config={ "litestar": {"commit_mode": "autocommit_include_redirect"} } diff --git a/docs/extensions/litestar/session_stores.rst b/docs/extensions/litestar/session_stores.rst index 467d967ae..9d122c8ce 100644 --- a/docs/extensions/litestar/session_stores.rst +++ b/docs/extensions/litestar/session_stores.rst @@ -55,7 +55,7 @@ Basic Setup # 2. 
Add database configuration db = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={"litestar": {"session_table": "litestar_sessions"}}, ) ) @@ -276,7 +276,7 @@ Configure custom table names via ``extension_config``: spec = SQLSpec() config = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={ "litestar": { "session_table": "custom_sessions" @@ -320,7 +320,7 @@ Enable In-Memory for Oracle session stores via ``extension_config``: spec = SQLSpec() config = spec.add_config( OracleAsyncConfig( - pool_config={ + connection_config={ "user": "app_user", "password": "secure_password", "dsn": "oracle.example.com:1521/XEPDB1", @@ -412,7 +412,7 @@ Verify In-Memory status after table creation: from sqlspec.adapters.oracledb import OracleAsyncConfig - config = OracleAsyncConfig(pool_config={"dsn": "..."}) + config = OracleAsyncConfig(connection_config={"dsn": "..."}) async with config.provide_connection() as conn: cursor = conn.cursor() @@ -547,7 +547,7 @@ Session tables can be managed via SQLSpec migrations. The configuration must be spec = SQLSpec() db = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={ "litestar": {"session_table": "custom_sessions"} }, diff --git a/docs/extensions/litestar/transactions.rst b/docs/extensions/litestar/transactions.rst index 73a4ad6a5..414a056eb 100644 --- a/docs/extensions/litestar/transactions.rst +++ b/docs/extensions/litestar/transactions.rst @@ -24,7 +24,7 @@ Explicit transaction control in route handlers. from sqlspec.adapters.asyncpg import AsyncpgConfig config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={"litestar": {"commit_mode": "manual"}} ) @@ -63,7 +63,7 @@ Automatic commit on 2XX status codes, rollback on others. .. code-block:: python config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={"litestar": {"commit_mode": "autocommit"}} ) @@ -112,7 +112,7 @@ Commits on both 2XX and 3XX redirect status codes. .. code-block:: python config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={ "litestar": {"commit_mode": "autocommit_include_redirect"} } @@ -136,7 +136,7 @@ Fine-tune commit/rollback behavior: .. code-block:: python config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={ "litestar": { "commit_mode": "autocommit", diff --git a/docs/getting_started/quickstart.rst b/docs/getting_started/quickstart.rst index 87f64ec5c..9dbf84e48 100644 --- a/docs/getting_started/quickstart.rst +++ b/docs/getting_started/quickstart.rst @@ -100,7 +100,7 @@ One of SQLSpec's strengths is the consistent API across databases. Here's the sa For tests we surface the PostgreSQL connection info through the ``SQLSPEC_QUICKSTART_PG_*`` environment variables (host, port, user, password, database). 
When running the snippet outside pytest, export those - variables or inline your DSN in ``_pool_config()`` so the example keeps + variables or inline your DSN in ``connection_config`` so the example keeps working. diff --git a/docs/guides/adapters/asyncpg.md b/docs/guides/adapters/asyncpg.md index 71bf262d3..d06cd0fce 100644 --- a/docs/guides/adapters/asyncpg.md +++ b/docs/guides/adapters/asyncpg.md @@ -54,7 +54,7 @@ db_manager = SQLSpec() # IAM authentication (no password required) db = db_manager.add_config(AsyncpgConfig( - pool_config={ + connection_config={ "user": "my-service-account@project.iam", "database": "mydb", "min_size": 2, @@ -77,7 +77,7 @@ Password authentication is also supported: ```python config = AsyncpgConfig( - pool_config={ + connection_config={ "user": "postgres", "password": "secret", "database": "mydb", @@ -97,7 +97,7 @@ Connect to AlloyDB instances with the same pattern: ```python # IAM authentication config = AsyncpgConfig( - pool_config={ + connection_config={ "user": "my-service-account@project.iam", "database": "mydb", }, @@ -130,7 +130,7 @@ pip install cloud-alloydb-python-connector **Authentication Methods**: - IAM authentication: Set `cloud_sql_enable_iam_auth=True` or `alloydb_enable_iam_auth=True` -- Password authentication: Leave IAM flags as `False` (default) and provide password in `pool_config` +- Password authentication: Leave IAM flags as `False` (default) and provide password in `connection_config` For comprehensive configuration options and troubleshooting, see the [Google Cloud Connectors Guide](/guides/cloud/google-connectors.md). diff --git a/docs/guides/adapters/oracledb.md b/docs/guides/adapters/oracledb.md index 70869cfbf..5d4704a4f 100644 --- a/docs/guides/adapters/oracledb.md +++ b/docs/guides/adapters/oracledb.md @@ -78,7 +78,7 @@ The Oracle session stores (`OracleAsyncStore` and `OracleSyncStore`) support opt from sqlspec.adapters.oracledb import OracleAsyncConfig from sqlspec.adapters.oracledb.litestar import OracleAsyncStore -config = OracleAsyncConfig(pool_config={"dsn": "oracle://..."}) +config = OracleAsyncConfig(connection_config={"dsn": "oracle://..."}) # Standard table (default) store = OracleAsyncStore(config) @@ -141,7 +141,7 @@ class Article(msgspec.Struct): title: str content: str # CLOB column automatically becomes string -config = OracleAsyncConfig(pool_config={"dsn": "oracle://..."}) +config = OracleAsyncConfig(connection_config={"dsn": "oracle://..."}) async with config.provide_session() as session: # Insert large text content @@ -248,7 +248,7 @@ Oracle returns unquoted identifiers in uppercase (for example `ID`, `PRODUCT_NAM from sqlspec.adapters.oracledb import OracleAsyncConfig config = OracleAsyncConfig( - pool_config={"dsn": "oracle://..."}, + connection_config={"dsn": "oracle://..."}, driver_features={"enable_lowercase_column_names": True}, ) ``` @@ -261,7 +261,7 @@ config = OracleAsyncConfig( ```python config = OracleAsyncConfig( - pool_config={"dsn": "oracle://..."}, + connection_config={"dsn": "oracle://..."}, driver_features={"enable_lowercase_column_names": False}, ) ``` @@ -294,7 +294,7 @@ This feature enables seamless UUID handling with optimal storage efficiency: import uuid from sqlspec.adapters.oracledb import OracleAsyncConfig -config = OracleAsyncConfig(pool_config={"dsn": "oracle://..."}) +config = OracleAsyncConfig(connection_config={"dsn": "oracle://..."}) async with config.provide_session() as session: # Create table with RAW(16) for UUID storage @@ -335,7 +335,7 @@ UUID binary conversion is enabled 
by default (no configuration required): ```python config = OracleAsyncConfig( - pool_config={"dsn": "oracle://..."}, + connection_config={"dsn": "oracle://..."}, driver_features={ "enable_uuid_binary": True # Default: True (stdlib, always available) } @@ -346,7 +346,7 @@ To disable automatic conversion: ```python config = OracleAsyncConfig( - pool_config={"dsn": "oracle://..."}, + connection_config={"dsn": "oracle://..."}, driver_features={ "enable_uuid_binary": False # Revert to manual .bytes conversion } @@ -514,7 +514,7 @@ UUID handlers coexist with other type handlers (e.g., NumPy vectors) through han ```python config = OracleAsyncConfig( - pool_config={"dsn": "oracle://..."}, + connection_config={"dsn": "oracle://..."}, driver_features={ "enable_numpy_vectors": True, # NumPy vector support "enable_uuid_binary": True # UUID binary support @@ -597,7 +597,7 @@ Enable NumPy vector support via `driver_features`: from sqlspec.adapters.oracledb import OracleAsyncConfig config = OracleAsyncConfig( - pool_config={ + connection_config={ "dsn": "oracle://host:port/service_name", "user": "username", "password": "password", diff --git a/docs/guides/adapters/spanner.md b/docs/guides/adapters/spanner.md index 96b433199..95e7caac8 100644 --- a/docs/guides/adapters/spanner.md +++ b/docs/guides/adapters/spanner.md @@ -37,7 +37,7 @@ This guide provides specific instructions for the `spanner` adapter. from sqlspec.adapters.spanner import SpannerSyncConfig config = SpannerSyncConfig( - pool_config={ + connection_config={ "project": "my-project", "instance_id": "my-instance", "database_id": "my-database", @@ -61,7 +61,7 @@ For local development and testing, use the Spanner emulator: from google.auth.credentials import AnonymousCredentials config = SpannerSyncConfig( - pool_config={ + connection_config={ "project": "test-project", "instance_id": "test-instance", "database_id": "test-database", @@ -77,7 +77,7 @@ config = SpannerSyncConfig( from google.cloud.spanner_v1.pool import FixedSizePool, PingingPool config = SpannerSyncConfig( - pool_config={ + connection_config={ "project": "my-project", "instance_id": "my-instance", "database_id": "my-database", @@ -99,7 +99,7 @@ The Spanner Python client requires base64-encoded bytes when using `param_types. 
from sqlspec.adapters.spanner import SpannerSyncConfig config = SpannerSyncConfig( - pool_config={ + connection_config={ "project": "my-project", "instance_id": "my-instance", "database_id": "my-database", @@ -241,7 +241,7 @@ from sqlspec.adapters.spanner import SpannerSyncConfig from sqlspec.adapters.spanner.litestar import SpannerSyncStore config = SpannerSyncConfig( - pool_config={ + connection_config={ "project": "my-project", "instance_id": "my-instance", "database_id": "my-database", @@ -281,7 +281,7 @@ from sqlspec.adapters.spanner import SpannerSyncConfig from sqlspec.adapters.spanner.adk import SpannerADKStore config = SpannerSyncConfig( - pool_config={ + connection_config={ "project": "my-project", "instance_id": "my-instance", "database_id": "my-database", diff --git a/docs/guides/architecture/arrow-integration.md b/docs/guides/architecture/arrow-integration.md index 16c37aead..d58c23cb9 100644 --- a/docs/guides/architecture/arrow-integration.md +++ b/docs/guides/architecture/arrow-integration.md @@ -153,7 +153,7 @@ from sqlspec import SQLSpec from sqlspec.adapters.asyncpg import AsyncpgConfig sql = SQLSpec() -config = AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/mydb"}) +config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/mydb"}) sql.add_config(config) async with sql.provide_session() as session: @@ -278,7 +278,7 @@ from sqlspec.adapters.adbc import AdbcConfig config = AdbcConfig( driver="adbc_driver_postgresql", - pool_config={"uri": "postgresql://localhost/db"} + connection_config={"uri": "postgresql://localhost/db"} ) async with sql.provide_session(config) as session: @@ -324,7 +324,7 @@ with sql.provide_session(config) as session: from sqlspec.adapters.bigquery import BigQueryConfig config = BigQueryConfig( - pool_config={"project": "my-project"}, + connection_config={"project": "my-project"}, driver_features={"enable_storage_api": True} # Auto-detected ) @@ -350,7 +350,7 @@ async with sql.provide_session(config) as session: ```python from sqlspec.adapters.asyncpg import AsyncpgConfig -config = AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/db"}) +config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/db"}) async with sql.provide_session(config) as session: result = await session.select_to_arrow( @@ -372,7 +372,7 @@ async with sql.provide_session(config) as session: ```python from sqlspec.adapters.oracledb import OracleAsyncConfig -config = OracleAsyncConfig(pool_config={"dsn": "oracle://localhost/FREE"}) +config = OracleAsyncConfig(connection_config={"dsn": "oracle://localhost/FREE"}) async with sql.provide_session(config) as session: result = await session.select_to_arrow( @@ -393,7 +393,7 @@ async with sql.provide_session(config) as session: ```python from sqlspec.adapters.asyncmy import AsyncmyConfig -config = AsyncmyConfig(pool_config={"dsn": "mysql://localhost/db"}) +config = AsyncmyConfig(connection_config={"dsn": "mysql://localhost/db"}) async with sql.provide_session(config) as session: result = await session.select_to_arrow( diff --git a/docs/guides/architecture/observability.md b/docs/guides/architecture/observability.md index 168078a01..1765bd17c 100644 --- a/docs/guides/architecture/observability.md +++ b/docs/guides/architecture/observability.md @@ -26,7 +26,7 @@ def ensure_extensions(connection): connection.execute("INSTALL http_client; LOAD http_client;") config = DuckDBConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, driver_features={ "extensions": 
[{"name": "http_client"}], "on_connection_create": ensure_extensions, # promoted to observability runtime diff --git a/docs/guides/cloud/google-connectors.md b/docs/guides/cloud/google-connectors.md index 63d01d2fb..ff8b790d9 100644 --- a/docs/guides/cloud/google-connectors.md +++ b/docs/guides/cloud/google-connectors.md @@ -51,7 +51,7 @@ from sqlspec.adapters.asyncpg import AsyncpgConfig sql = SQLSpec() config = AsyncpgConfig( - pool_config={ + connection_config={ "user": "postgres", "password": "secret", "database": "mydb", @@ -72,7 +72,7 @@ async with sql.provide_session(config) as session: ```python config = AsyncpgConfig( - pool_config={ + connection_config={ "user": "postgres", "password": "secret", "database": "mydb", @@ -140,7 +140,7 @@ The simplest method for GCP deployments. Connectors automatically use credential ```python # No explicit credentials needed - ADC handles it config = AsyncpgConfig( - pool_config={ + connection_config={ "user": "postgres", "password": "secret", "database": "mydb", @@ -158,7 +158,7 @@ Passwordless authentication using Google Cloud IAM: ```python config = AsyncpgConfig( - pool_config={ + connection_config={ "user": "my-service-account@my-project.iam", # IAM principal "database": "mydb", }, @@ -190,7 +190,7 @@ Traditional username/password authentication: ```python config = AsyncpgConfig( - pool_config={ + connection_config={ "user": "postgres", "password": "secret", # From Secret Manager recommended "database": "mydb", @@ -279,7 +279,7 @@ sql = SQLSpec() # Cloud SQL production database cloud_sql_config = AsyncpgConfig( - pool_config={"user": "app", "password": "secret", "database": "prod"}, + connection_config={"user": "app", "password": "secret", "database": "prod"}, driver_features={ "enable_cloud_sql": True, "cloud_sql_instance": "prod-project:us-central1:prod-db", @@ -290,7 +290,7 @@ sql.add_config(cloud_sql_config) # AlloyDB analytics database alloydb_config = AsyncpgConfig( - pool_config={"user": "analytics", "password": "secret", "database": "warehouse"}, + connection_config={"user": "analytics", "password": "secret", "database": "warehouse"}, driver_features={ "enable_alloydb": True, "alloydb_instance_uri": "projects/analytics/locations/us-central1/clusters/warehouse/instances/primary", @@ -313,7 +313,7 @@ async with sql.provide_session(alloydb_config) as session: ```python config = AsyncpgConfig( - pool_config={ + connection_config={ "dsn": "postgresql://user:pass@10.0.0.5:5432/mydb", "ssl": ssl_context, # Manual SSL setup } @@ -324,7 +324,7 @@ config = AsyncpgConfig( ```python config = AsyncpgConfig( - pool_config={ + connection_config={ "user": "user", "password": "pass", "database": "mydb", @@ -431,7 +431,7 @@ Connectors work seamlessly with AsyncPG connection pooling: ```python config = AsyncpgConfig( - pool_config={ + connection_config={ "min_size": 2, "max_size": 10, "max_inactive_connection_lifetime": 300, @@ -488,7 +488,7 @@ cloud-sql-proxy my-project:us-central1:my-instance --port 5432 # Connect with any adapter config = PsqlpyConfig( # Or ADBC, psycopg, etc. 
- pool_config={"dsn": "postgresql://localhost:5432/mydb"} + connection_config={"dsn": "postgresql://localhost:5432/mydb"} ) ``` diff --git a/docs/guides/development/code-standards.md b/docs/guides/development/code-standards.md index a71bce705..b679116db 100644 --- a/docs/guides/development/code-standards.md +++ b/docs/guides/development/code-standards.md @@ -351,7 +351,7 @@ class MyMetadata: ```python # GOOD - Function-based test def test_config_validation(): - config = AsyncpgConfig(pool_config={"dsn": "postgresql://..."}) + config = AsyncpgConfig(connection_config={"dsn": "postgresql://..."}) assert config.is_async is True # BAD - Class-based test (PROHIBITED) @@ -370,7 +370,7 @@ import tempfile def test_starlette_autocommit_mode() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: config = AiosqliteConfig( - pool_config={"database": tmp.name}, + connection_config={"database": tmp.name}, extension_config={"starlette": {"commit_mode": "autocommit"}} ) # Test logic - each test gets isolated database diff --git a/docs/guides/development/implementation-patterns.md b/docs/guides/development/implementation-patterns.md index 00c98d5f8..1a8ab4d8e 100644 --- a/docs/guides/development/implementation-patterns.md +++ b/docs/guides/development/implementation-patterns.md @@ -218,7 +218,7 @@ def register_handlers(connection: "Connection") -> None: ```python async def _create_pool(self) -> Pool: - config = dict(self.pool_config) + config = dict(self.connection_config) if self.driver_features.get("enable_feature", False): config["session_callback"] = self._init_connection @@ -317,7 +317,7 @@ def _after_request_handler(self, response: "Response") -> "Response": ```python config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={ "starlette": { "commit_mode": "autocommit", @@ -441,8 +441,8 @@ def _setup_cloud_sql_connector(self, config: dict[str, Any]) -> None: ```python async def _close_pool(self) -> None: - if self.pool_instance: - await self.pool_instance.close() + if self.connection_instance: + await self.connection_instance.close() if self._cloud_sql_connector is not None: await self._cloud_sql_connector.close_async() diff --git a/docs/guides/extensions/adk.md b/docs/guides/extensions/adk.md index 775b269f0..719a9422c 100644 --- a/docs/guides/extensions/adk.md +++ b/docs/guides/extensions/adk.md @@ -60,7 +60,7 @@ from sqlspec.extensions.adk import SQLSpecSessionService async def build_service() -> SQLSpecSessionService: config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/agents"}, + connection_config={"dsn": "postgresql://localhost/agents"}, extension_config={ "adk": { "session_table": "adk_sessions", @@ -117,7 +117,7 @@ from sqlspec.extensions.adk import SQLSpecSessionService def build_spanner_service() -> SQLSpecSessionService: config = SpannerSyncConfig( - pool_config={ + connection_config={ "project": "my-project", "instance_id": "my-instance", "database_id": "agents", @@ -169,7 +169,7 @@ adk_config = cast("ADKConfig", { "events_table": "agent_events", "owner_id_column": "workspace_id UUID REFERENCES workspaces(id) ON DELETE SET NULL", }) -config = AsyncpgConfig(pool_config={"dsn": "postgresql://..."}, +config = AsyncpgConfig(connection_config={"dsn": "postgresql://..."}, extension_config={"adk": adk_config}) ``` diff --git a/docs/guides/extensions/aiosql.md b/docs/guides/extensions/aiosql.md index d351d4c31..5a940434f 100644 --- 
a/docs/guides/extensions/aiosql.md +++ b/docs/guides/extensions/aiosql.md @@ -43,7 +43,7 @@ from sqlspec.extensions.aiosql import AiosqlAsyncAdapter async def main() -> None: spec = SQLSpec() - config = spec.add_config(AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/app"})) + config = spec.add_config(AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/app"})) async with spec.provide_driver(config) as driver: adapter = AiosqlAsyncAdapter(driver) diff --git a/docs/guides/extensions/fastapi.md b/docs/guides/extensions/fastapi.md index 91c471a09..41e05a007 100644 --- a/docs/guides/extensions/fastapi.md +++ b/docs/guides/extensions/fastapi.md @@ -26,7 +26,7 @@ from sqlspec.extensions.fastapi import SQLSpecPlugin sqlspec = SQLSpec() config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={ "starlette": { "commit_mode": "autocommit", @@ -60,7 +60,7 @@ Configure the plugin via `extension_config["starlette"]` in your database config from sqlspec.config import StarletteConfig config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={ "starlette": StarletteConfig( commit_mode="autocommit", @@ -106,7 +106,7 @@ from sqlspec.extensions.fastapi import SQLSpecPlugin sqlspec = SQLSpec() sqlspec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/main"}, + connection_config={"dsn": "postgresql://localhost/main"}, extension_config={ "starlette": { "session_key": "primary", @@ -120,7 +120,7 @@ sqlspec.add_config( sqlspec.add_config( AiosqliteConfig( - pool_config={"database": "analytics.db"}, + connection_config={"database": "analytics.db"}, extension_config={ "starlette": { "session_key": "analytics", @@ -210,7 +210,7 @@ Requires explicit transaction management: ```python config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={"starlette": {"commit_mode": "manual"}} ) @@ -242,7 +242,7 @@ Automatically commits on 2xx status codes, rolls back otherwise: ```python config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={"starlette": {"commit_mode": "autocommit"}} ) @@ -273,7 +273,7 @@ Commits on 2xx and 3xx status codes: from fastapi.responses import RedirectResponse config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={"starlette": {"commit_mode": "autocommit_include_redirect"}} ) @@ -309,7 +309,7 @@ from sqlspec.extensions.fastapi import SQLSpecPlugin sqlspec = SQLSpec() pg_config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/main"}, + connection_config={"dsn": "postgresql://localhost/main"}, extension_config={ "starlette": { "commit_mode": "autocommit", @@ -319,7 +319,7 @@ pg_config = AsyncpgConfig( ) mysql_config = AsyncmyConfig( - pool_config={"dsn": "mysql://localhost/analytics"}, + connection_config={"dsn": "mysql://localhost/analytics"}, extension_config={ "starlette": { "commit_mode": "autocommit", @@ -499,7 +499,7 @@ from sqlspec.adapters.aiosqlite import AiosqliteConfig def test_users_endpoint(): sqlspec = SQLSpec() config = AiosqliteConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, 
extension_config={"starlette": {"commit_mode": "autocommit"}} ) sqlspec.add_config(config) @@ -677,7 +677,7 @@ If you're using the old manual pattern, migration is straightforward: from collections.abc import AsyncGenerator spec = SQLSpec() -db_config = spec.add_config(AsyncpgConfig(pool_config={"dsn": "postgresql://..."})) +db_config = spec.add_config(AsyncpgConfig(connection_config={"dsn": "postgresql://..."})) @asynccontextmanager @@ -715,7 +715,7 @@ async def create_user( ```python sqlspec = SQLSpec() config = AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, extension_config={"starlette": {"commit_mode": "autocommit"}} ) sqlspec.add_config(config) diff --git a/docs/guides/extensions/flask.md b/docs/guides/extensions/flask.md index 89653e6f6..a22a84b51 100644 --- a/docs/guides/extensions/flask.md +++ b/docs/guides/extensions/flask.md @@ -35,7 +35,7 @@ from sqlspec.extensions.flask import SQLSpecPlugin sqlspec = SQLSpec() config = SqliteConfig( - pool_config={"database": "app.db"}, + connection_config={"database": "app.db"}, extension_config={ "flask": { "commit_mode": "autocommit", @@ -65,7 +65,7 @@ Configure the Flask extension via `extension_config["flask"]`: from sqlspec.config import FlaskConfig config = SqliteConfig( - pool_config={"database": "app.db"}, + connection_config={"database": "app.db"}, extension_config={ "flask": FlaskConfig( connection_key="db_connection", # Optional: Flask g object key for connection @@ -103,14 +103,14 @@ sqlspec = SQLSpec() sqlspec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/main"}, + connection_config={"dsn": "postgresql://localhost/main"}, extension_config={"flask": {"session_key": "primary", "commit_mode": "autocommit"}}, ) ) sqlspec.add_config( SqliteConfig( - pool_config={"database": "analytics.db"}, + connection_config={"database": "analytics.db"}, extension_config={"flask": {"session_key": "analytics", "commit_mode": "manual"}}, ) ) @@ -158,7 +158,7 @@ from sqlspec.extensions.flask import SQLSpecPlugin sqlspec = SQLSpec() config = SqliteConfig( - pool_config={"database": "app.db"}, + connection_config={"database": "app.db"}, extension_config={ "flask": {"commit_mode": "manual", "session_key": "db"} } @@ -192,7 +192,7 @@ Automatically commits on successful responses (2xx status codes), rolls back oth ```python config = SqliteConfig( - pool_config={"database": "app.db"}, + connection_config={"database": "app.db"}, extension_config={ "flask": {"commit_mode": "autocommit", "session_key": "db"} } @@ -235,7 +235,7 @@ Commits on 2xx and 3xx status codes (including redirects): ```python config = SqliteConfig( - pool_config={"database": "app.db"}, + connection_config={"database": "app.db"}, extension_config={ "flask": {"commit_mode": "autocommit_include_redirect", "session_key": "db"} } @@ -266,7 +266,7 @@ Add extra status codes for commit/rollback: ```python config = SqliteConfig( - pool_config={"database": "app.db"}, + connection_config={"database": "app.db"}, extension_config={ "flask": { "commit_mode": "autocommit", @@ -291,7 +291,7 @@ sqlspec = SQLSpec() # Primary database (user data) users_config = SqliteConfig( - pool_config={"database": "users.db"}, + connection_config={"database": "users.db"}, extension_config={ "flask": { "commit_mode": "autocommit", @@ -302,7 +302,7 @@ users_config = SqliteConfig( # Analytics database (events) events_config = DuckDBConfig( - pool_config={"database": "events.db"}, + connection_config={"database": "events.db"}, 
extension_config={ "flask": { "commit_mode": "autocommit", @@ -357,7 +357,7 @@ def create_app(config=None): # Create SQLSpec and config sqlspec = SQLSpec() db_config = SqliteConfig( - pool_config={"database": app.config.get("DATABASE_URL", "app.db")}, + connection_config={"database": app.config.get("DATABASE_URL", "app.db")}, extension_config={ "flask": { "commit_mode": app.config.get("DB_COMMIT_MODE", "autocommit"), @@ -427,7 +427,7 @@ from sqlspec.adapters.asyncpg import AsyncpgConfig sqlspec = SQLSpec() config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={ "flask": {"commit_mode": "autocommit", "session_key": "db"} } @@ -577,7 +577,7 @@ Sync adapters support connection pooling: from sqlspec.adapters.sqlite import SqliteConfig config = SqliteConfig( - pool_config={ + connection_config={ "database": "app.db", "check_same_thread": False, # Required for pooling "timeout": 30.0 @@ -619,7 +619,7 @@ from sqlspec.extensions.flask import SQLSpecPlugin sqlspec = SQLSpec() config = SqliteConfig( - pool_config={"database": "app.db"}, + connection_config={"database": "app.db"}, extension_config={"flask": {"commit_mode": "autocommit"}} ) sqlspec.add_config(config) @@ -667,11 +667,11 @@ plugin.init_app(app) # Error: already registered ```python config1 = SqliteConfig( - pool_config={"database": "db1.db"}, + connection_config={"database": "db1.db"}, extension_config={"flask": {"session_key": "db1"}} ) config2 = SqliteConfig( - pool_config={"database": "db2.db"}, + connection_config={"database": "db2.db"}, extension_config={"flask": {"session_key": "db2"}} # Must be unique! ) ``` diff --git a/docs/guides/extensions/litestar.md b/docs/guides/extensions/litestar.md index 6443d40fd..eff4350b7 100644 --- a/docs/guides/extensions/litestar.md +++ b/docs/guides/extensions/litestar.md @@ -43,7 +43,7 @@ from sqlspec.extensions.litestar import SQLSpecPlugin spec = SQLSpec() db = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/app"}, + connection_config={"dsn": "postgresql://localhost/app"}, extension_config={ "litestar": { "commit_mode": "autocommit", @@ -86,7 +86,7 @@ Override dependency keys for multi-tenant setups: ```python config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/primary"}, + connection_config={"dsn": "postgresql://localhost/primary"}, extension_config={ "litestar": { "session_key": "primary_db", @@ -127,7 +127,7 @@ from sqlspec.extensions.litestar import SQLSpecPlugin spec = SQLSpec() db = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/app"}, + connection_config={"dsn": "postgresql://localhost/app"}, extension_config={"litestar": {"session_table": "litestar_sessions"}}, ) ) diff --git a/docs/guides/extensions/sanic.md b/docs/guides/extensions/sanic.md index d461d3a14..16fe696d9 100644 --- a/docs/guides/extensions/sanic.md +++ b/docs/guides/extensions/sanic.md @@ -38,7 +38,7 @@ extend = Extend(app) spec = SQLSpec() db_config = spec.add_config( - AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/service"}), + AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/service"}), ) diff --git a/docs/guides/extensions/starlette.md b/docs/guides/extensions/starlette.md index 4bb9fced0..94f1eea83 100644 --- a/docs/guides/extensions/starlette.md +++ b/docs/guides/extensions/starlette.md @@ -29,7 +29,7 @@ from sqlspec.extensions.starlette import SQLSpecPlugin sqlspec = SQLSpec() config = AsyncpgConfig( - 
pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={ "starlette": { "commit_mode": "autocommit", @@ -64,7 +64,7 @@ Configure the plugin via `extension_config["starlette"]` in your database config from sqlspec.config import StarletteConfig config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={ "starlette": StarletteConfig( commit_mode="autocommit", @@ -108,7 +108,7 @@ from starlette.routing import Route sqlspec = SQLSpec() sqlspec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/main"}, + connection_config={"dsn": "postgresql://localhost/main"}, extension_config={ "starlette": { "session_key": "primary", @@ -122,7 +122,7 @@ sqlspec.add_config( sqlspec.add_config( AiosqliteConfig( - pool_config={"database": "analytics.db"}, + connection_config={"database": "analytics.db"}, extension_config={ "starlette": { "session_key": "analytics", @@ -171,7 +171,7 @@ Requires explicit transaction management: ```python config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={"starlette": {"commit_mode": "manual"}} ) @@ -203,7 +203,7 @@ Automatically commits on 2xx status codes, rolls back otherwise: ```python config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={"starlette": {"commit_mode": "autocommit"}} ) @@ -232,7 +232,7 @@ Commits on 2xx and 3xx status codes: ```python config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={"starlette": {"commit_mode": "autocommit_include_redirect"}} ) @@ -268,7 +268,7 @@ from sqlspec.extensions.starlette import SQLSpecPlugin sqlspec = SQLSpec() pg_config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/main"}, + connection_config={"dsn": "postgresql://localhost/main"}, extension_config={ "starlette": { "commit_mode": "autocommit", @@ -278,7 +278,7 @@ pg_config = AsyncpgConfig( ) mysql_config = AsyncmyConfig( - pool_config={"dsn": "mysql://localhost/analytics"}, + connection_config={"dsn": "mysql://localhost/analytics"}, extension_config={ "starlette": { "commit_mode": "autocommit", @@ -391,7 +391,7 @@ from sqlspec.adapters.aiosqlite import AiosqliteConfig def test_users_endpoint(): sqlspec = SQLSpec() config = AiosqliteConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, extension_config={"starlette": {"commit_mode": "autocommit"}} ) sqlspec.add_config(config) @@ -517,7 +517,7 @@ If you're using the old manual pattern, migration is straightforward: ```python spec = SQLSpec() -config = spec.add_config(AsyncpgConfig(pool_config={"dsn": "postgresql://..."})) +config = spec.add_config(AsyncpgConfig(connection_config={"dsn": "postgresql://..."})) @asynccontextmanager @@ -542,7 +542,7 @@ app = Starlette(routes=[Route("/users", list_users)], lifespan=lifespan) ```python sqlspec = SQLSpec() config = AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, extension_config={"starlette": {"commit_mode": "autocommit"}} ) sqlspec.add_config(config) diff --git a/docs/guides/migration/connection-config.md b/docs/guides/migration/connection-config.md new file mode 100644 index 
000000000..c1311aa5a --- /dev/null +++ b/docs/guides/migration/connection-config.md @@ -0,0 +1,314 @@ +# Migration Guide: pool_config → connection_config + +**Version:** 0.33.0 +**Type:** Breaking Change +**Impact:** All SQLSpec configurations + +## Summary + +SQLSpec 0.33.0 standardizes configuration parameter naming across all database adapters: + +- `pool_config` → `connection_config` (configuration dictionary) +- `pool_instance` → `connection_instance` (pre-created pool/connection instance) + +This change affects **all 11 database adapters** and provides a consistent, intuitive API. + +## Why This Change? + +### Previous Inconsistency + +Before this change, SQLSpec had inconsistent parameter naming: + +- **9 pooled adapters** used `pool_config` and `pool_instance` +- **2 non-pooled adapters** (BigQuery, ADBC) used `connection_config` and `pool_instance` + +This created confusion: +- `pool_instance` didn't make semantic sense for non-pooled adapters +- New users had to learn which adapters used which parameter names +- Documentation had to explain the split + +### New Consistency + +After this change, **all adapters** use the same parameter names: + +```python +# Every adapter now follows the same pattern +config = AdapterConfig( + connection_config={...}, # Settings for connection/pool + connection_instance=instance # Pre-created pool or connection +) +``` + +**Benefits:** +- Single pattern to learn and remember +- Works semantically for both pooled and non-pooled adapters +- Reduced cognitive load when switching between adapters +- Clearer documentation + +## Migration Steps + +### 1. Search and Replace + +The simplest migration approach is a global search and replace: + +**For Python files:** + +```bash +# Replace pool_config → connection_config +find . -type f -name "*.py" -exec sed -i 's/pool_config=/connection_config=/g' {} + + +# Replace pool_instance → connection_instance +find . -type f -name "*.py" -exec sed -i 's/pool_instance=/connection_instance=/g' {} + +``` + +**For IDE users:** + +1. Global search for `pool_config=` +2. Replace all with `connection_config=` +3. Global search for `pool_instance=` +4. Replace all with `connection_instance=` + +### 2. Before and After Examples + +#### Pooled Adapters (PostgreSQL, MySQL, etc.) 
+ +**Before:** + +```python +from sqlspec.adapters.asyncpg import AsyncpgConfig + +config = AsyncpgConfig( + pool_config={ + "dsn": "postgresql://localhost/mydb", + "min_size": 5, + "max_size": 20, + } +) + +# Or with pre-created pool +config = AsyncpgConfig( + pool_instance=my_existing_pool +) +``` + +**After:** + +```python +from sqlspec.adapters.asyncpg import AsyncpgConfig + +config = AsyncpgConfig( + connection_config={ + "dsn": "postgresql://localhost/mydb", + "min_size": 5, + "max_size": 20, + } +) + +# Or with pre-created pool +config = AsyncpgConfig( + connection_instance=my_existing_pool +) +``` + +#### SQLite with Custom Pool + +**Before:** + +```python +from sqlspec.adapters.sqlite import SqliteConfig + +config = SqliteConfig( + pool_config={ + "database": "mydb.db", + "check_same_thread": False, + "pool_min_size": 5, + "pool_max_size": 10, + } +) +``` + +**After:** + +```python +from sqlspec.adapters.sqlite import SqliteConfig + +config = SqliteConfig( + connection_config={ + "database": "mydb.db", + "check_same_thread": False, + "pool_min_size": 5, + "pool_max_size": 10, + } +) +``` + +#### Non-Pooled Adapters (BigQuery, ADBC) + +**Before:** + +```python +from sqlspec.adapters.bigquery import BigQueryConfig + +# Already used connection_config, but pool_instance was misleading +config = BigQueryConfig( + connection_config={ + "project": "my-project", + "dataset_id": "my-dataset", + }, + pool_instance=my_client # Misleading name! +) +``` + +**After:** + +```python +from sqlspec.adapters.bigquery import BigQueryConfig + +# Now semantically correct +config = BigQueryConfig( + connection_config={ + "project": "my-project", + "dataset_id": "my-dataset", + }, + connection_instance=my_client # Clear! +) +``` + +#### Framework Extensions + +**Before:** + +```python +from sqlspec.adapters.asyncpg import AsyncpgConfig + +config = AsyncpgConfig( + pool_config={"dsn": "postgresql://localhost/db"}, + extension_config={ + "litestar": {"commit_mode": "autocommit"} + } +) +``` + +**After:** + +```python +from sqlspec.adapters.asyncpg import AsyncpgConfig + +config = AsyncpgConfig( + connection_config={"dsn": "postgresql://localhost/db"}, + extension_config={ + "litestar": {"commit_mode": "autocommit"} + } +) +``` + +### 3. Verification + +After migration, verify your changes: + +```bash +# Run linting to catch any issues +make lint + +# Run type checking +make mypy + +# Run your test suite +pytest +``` + +## Breaking Changes + +### What Changed + +1. **Parameter names** in all adapter config classes: + - `pool_config` parameter → `connection_config` + - `pool_instance` parameter → `connection_instance` + +2. **Attribute names** in config instances: + - `config.pool_config` attribute → `config.connection_config` + - `config.pool_instance` attribute → `config.connection_instance` + +3. **Custom pool constructors**: + - `SqliteConnectionPool(**pool_config)` → `SqliteConnectionPool(**connection_config)` + - `AiosqliteConnectionPool(**pool_config)` → `AiosqliteConnectionPool(**connection_config)` + - `DuckDBConnectionPool(**pool_config)` → `DuckDBConnectionPool(**connection_config)` + +### What Didn't Change + +1. **Connection configuration keys** remain the same: + ```python + # These keys are unchanged + connection_config={ + "dsn": "...", # Same key + "min_size": 5, # Same key + "max_size": 20, # Same key + } + ``` + +2. **Driver implementation** unchanged - no behavior changes + +3. **Extension configuration** unchanged - same `extension_config` parameter + +4. 
**Statement configuration** unchanged - same `statement_config` parameter + +## Affected Components + +### All Adapters (11 total) + +1. AsyncPG (`asyncpg`) +2. Psycopg (`psycopg`) +3. Asyncmy (`asyncmy`) +4. Psqlpy (`psqlpy`) +5. OracleDB (`oracledb`) +6. SQLite (`sqlite`) +7. AioSQLite (`aiosqlite`) +8. DuckDB (`duckdb`) +9. BigQuery (`bigquery`) +10. ADBC (`adbc`) +11. Spanner (`spanner`) + +### Framework Extensions + +- Litestar plugin +- Starlette extension +- FastAPI extension +- Flask extension + +### Custom Pools + +- `SqliteConnectionPool` +- `AiosqliteConnectionPool` +- `DuckDBConnectionPool` + +## Timeline + +- **Introduced:** v0.33.0 +- **Deprecation Period:** None (clean break) +- **Removal of Old Names:** Immediate in v0.33.0 + +## Rationale + +This breaking change was chosen over a deprecation period because: + +1. **Simple migration** - Automated search and replace +2. **Early in lifecycle** - SQLSpec is pre-1.0, breaking changes expected +3. **Clear improvement** - Eliminates confusion, improves consistency +4. **Low effort** - Mechanical change with high value + +## Support + +If you encounter issues during migration: + +1. Check this guide for examples +2. Search for remaining `pool_config` or `pool_instance` references: + ```bash + grep -r "pool_config\|pool_instance" your_project/ + ``` +3. File an issue on GitHub if you find edge cases not covered here + +## See Also + +- [Configuration Guide](../usage/configuration.rst) +- [CHANGELOG](../../changelog.rst) +- [GitHub Release Notes](https://github.com/litestar-org/sqlspec/releases) diff --git a/docs/guides/migrations/hybrid-versioning.md b/docs/guides/migrations/hybrid-versioning.md index 7e617f551..bc2990378 100644 --- a/docs/guides/migrations/hybrid-versioning.md +++ b/docs/guides/migrations/hybrid-versioning.md @@ -282,7 +282,7 @@ For Python-based migration automation, use the config method directly instead of from sqlspec.adapters.asyncpg import AsyncpgConfig config = AsyncpgConfig( - pool_config={"dsn": "postgresql://user:pass@localhost/mydb"}, + connection_config={"dsn": "postgresql://user:pass@localhost/mydb"}, migration_config={ "enabled": True, "script_location": "migrations", @@ -305,7 +305,7 @@ await config.fix_migrations(dry_run=False, update_database=False, yes=True) from sqlspec.adapters.sqlite import SqliteConfig config = SqliteConfig( - pool_config={"database": "myapp.db"}, + connection_config={"database": "myapp.db"}, migration_config={ "enabled": True, "script_location": "migrations", @@ -339,7 +339,7 @@ from sqlspec.adapters.asyncpg import AsyncpgConfig async def deploy(): config = AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, migration_config={"script_location": "migrations"} ) @@ -869,7 +869,7 @@ Auto-sync is enabled by default. 
Disable via config: from sqlspec.adapters.asyncpg import AsyncpgConfig config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, migration_config={ "script_location": "migrations", "enabled": True, @@ -1022,7 +1022,7 @@ Works with custom migration directories: ```python # config.py AsyncpgConfig( - pool_config={"dsn": "..."}, + connection_config={"dsn": "..."}, migration_config={ "script_location": "db/migrations", # Custom path "enabled": True diff --git a/docs/guides/testing/testing.md b/docs/guides/testing/testing.md index 273b0fbb4..8282cc88b 100644 --- a/docs/guides/testing/testing.md +++ b/docs/guides/testing/testing.md @@ -99,7 +99,7 @@ from tests.fixtures.sql_utils import apply_ddl @pytest.fixture(scope="session") def asyncpg_config(postgres_service): return AsyncpgConfig( - pool_config={"dsn": postgres_service.connection_url()}, + connection_config={"dsn": postgres_service.connection_url()}, extension_config={"litestar": {"session_table": "litestar_sessions"}}, ) @@ -181,7 +181,7 @@ def test_starlette_integration() -> None: """Test with isolated temporary database.""" with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: config = AiosqliteConfig( - pool_config={"database": tmp.name}, + connection_config={"database": tmp.name}, extension_config={"starlette": {"commit_mode": "autocommit"}} ) # Each test gets its own isolated database file diff --git a/docs/guides/upsert.md b/docs/guides/upsert.md index 152fddc7c..1c413fd61 100644 --- a/docs/guides/upsert.md +++ b/docs/guides/upsert.md @@ -290,8 +290,8 @@ from sqlspec import sql, SQLSpec, AsyncpgConfig, SqliteConfig # Configure multi-database setup sqlspec = SQLSpec() -sqlspec.add_config(AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/main"})) -sqlspec.add_config(SqliteConfig(pool_config={"database": "cache.db"})) +sqlspec.add_config(AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/main"})) +sqlspec.add_config(SqliteConfig(connection_config={"database": "cache.db"})) # Use default dialect from factory default_upsert = sql.upsert("products") # Uses first configured dialect diff --git a/docs/guides/writing/documentation-style-guide.md b/docs/guides/writing/documentation-style-guide.md index fe2a0137e..83d00fe5a 100644 --- a/docs/guides/writing/documentation-style-guide.md +++ b/docs/guides/writing/documentation-style-guide.md @@ -120,7 +120,7 @@ def execute(query: str) -> list[dict[str, Any]]: > "The pool size configuration has been updated. It now defaults to 10 instead of the old default of 5." ✅ **Good - Documentation:** -> "The pool size defaults to 10 connections. Configure `pool_config={'min_size': 10}` to adjust." +> "The pool size defaults to 10 connections. Configure `connection_config={'min_size': 10}` to adjust." 
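That quoted "good" phrasing maps directly onto code. A minimal sketch of the documented override under the new naming (the DSN is a placeholder, and the sizing values are illustrative rather than library defaults beyond the quoted sentence):

```python
from sqlspec.adapters.asyncpg import AsyncpgConfig

# Per the quoted sentence, the pool size defaults to 10 connections;
# connection_config overrides it explicitly.
config = AsyncpgConfig(
    connection_config={
        "dsn": "postgresql://localhost/mydb",  # placeholder DSN
        "min_size": 10,  # keep connections warm for fast response
        "max_size": 50,  # headroom for burst traffic
    }
)
```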
**Exceptions (use version annotations):** @@ -471,7 +471,7 @@ from sqlspec.adapters.asyncpg import AsyncpgConfig # Configure database spec = SQLSpec() config = spec.add_config( - AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/db"}) + AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/db"}) ) # Create store @@ -508,7 +508,7 @@ Use inline code for: ```python # Good: Explains why config = AsyncpgConfig( - pool_config={ + connection_config={ "min_size": 10, # Keep connections warm for fast response "max_size": 50 # Prevent pool exhaustion under load } @@ -516,7 +516,7 @@ config = AsyncpgConfig( # Bad: Explains obvious what config = AsyncpgConfig( # Create config - pool_config={ # Pool config + connection_config={ # Pool config "min_size": 10, # Set min size to 10 ``` diff --git a/docs/reference/adapters.rst b/docs/reference/adapters.rst index 4e07cfa8a..c91f036db 100644 --- a/docs/reference/adapters.rst +++ b/docs/reference/adapters.rst @@ -74,7 +74,7 @@ AsyncPG sql = SQLSpec() db = sql.add_config( AsyncpgConfig( - pool_config=AsyncpgPoolConfig( + connection_config=AsyncpgPoolConfig( dsn="postgresql://user:password@localhost:5432/mydb", min_size=5, max_size=20, @@ -147,7 +147,7 @@ Psycopg sql = SQLSpec() db = sql.add_config( PsycopgAsyncConfig( - pool_config={ + connection_config={ "conninfo": "postgresql://user:password@localhost:5432/mydb", "min_size": 5, "max_size": 20 @@ -168,7 +168,7 @@ Psycopg sql = SQLSpec() db = sql.add_config( PsycopgSyncConfig( - pool_config={ + connection_config={ "conninfo": "postgresql://user:password@localhost:5432/mydb", "min_size": 5, "max_size": 20 @@ -239,7 +239,7 @@ psqlpy sql = SQLSpec() db = sql.add_config( PsqlpyConfig( - pool_config={ + connection_config={ "dsn": "postgresql://user:password@localhost:5432/mydb", "max_pool_size": 20 } @@ -323,7 +323,7 @@ sqlite sql = SQLSpec() db = sql.add_config( SqliteConfig( - pool_config={ + connection_config={ "database": "/path/to/database.db", "timeout": 30.0, "check_same_thread": False @@ -385,7 +385,7 @@ aiosqlite sql = SQLSpec() db = sql.add_config( AiosqliteConfig( - pool_config={ + connection_config={ "database": "/path/to/database.db", "timeout": 30.0 } @@ -454,7 +454,7 @@ asyncmy sql = SQLSpec() db = sql.add_config( AsyncmyConfig( - pool_config={ + connection_config={ "host": "localhost", "port": 3306, "user": "root", @@ -551,7 +551,7 @@ duckdb sql = SQLSpec() db = sql.add_config( DuckDBConfig( - pool_config={ + connection_config={ "database": "/path/to/analytics.db", "threads": 4, "memory_limit": "4GB" @@ -574,7 +574,7 @@ duckdb **Community Extensions**: DuckDBConfig accepts the runtime flags DuckDB expects for community/unsigned extensions via -``pool_config`` (for example ``allow_community_extensions=True``, +``connection_config`` (for example ``allow_community_extensions=True``, ``allow_unsigned_extensions=True``, ``enable_external_access=True``). 
SQLSpec applies those options with ``SET`` statements immediately after establishing each connection, so even older DuckDB builds that do not recognize the options during ``duckdb.connect()`` will still enable the @@ -653,7 +653,7 @@ bigquery sql = SQLSpec() db = sql.add_config( BigQueryConfig( - pool_config={ + connection_config={ "project": "my-project-id", "credentials": credentials } @@ -730,7 +730,7 @@ spanner sql = SQLSpec() db = sql.add_config( SpannerSyncConfig( - pool_config={ + connection_config={ "project": "my-project-id", "instance_id": "my-instance", "database_id": "my-database", @@ -815,7 +815,7 @@ oracledb sql = SQLSpec() db = sql.add_config( OracleAsyncConfig( - pool_config={ + connection_config={ "user": "system", "password": "oracle", "dsn": "localhost:1521/XE", @@ -838,7 +838,7 @@ oracledb sql = SQLSpec() db = sql.add_config( OracleSyncConfig( - pool_config={ + connection_config={ "user": "system", "password": "oracle", "dsn": "localhost:1521/XE" @@ -860,7 +860,7 @@ oracledb sql = SQLSpec() db = sql.add_config( OracleAsyncConfig( - pool_config={ + connection_config={ "user": "system", "password": "oracle", "dsn": "localhost:1521/FREEPDB1" @@ -1286,7 +1286,7 @@ Type handlers are configured via the ``driver_features`` parameter: sql = SQLSpec() db = sql.add_config(OracleAsyncConfig( - pool_config={"dsn": "localhost:1521/FREEPDB1"} + connection_config={"dsn": "localhost:1521/FREEPDB1"} # enable_numpy_vectors automatically set to True if numpy installed )) @@ -1296,7 +1296,7 @@ Type handlers are configured via the ``driver_features`` parameter: # Explicitly disable optional feature db = sql.add_config(OracleAsyncConfig( - pool_config={"dsn": "localhost:1521/FREEPDB1"}, + connection_config={"dsn": "localhost:1521/FREEPDB1"}, driver_features={"enable_numpy_vectors": False} # Force disable )) @@ -1353,7 +1353,7 @@ PostgreSQL's pgvector extension enables vector similarity search. SQLSpec automa sql = SQLSpec() db = sql.add_config(AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"} + connection_config={"dsn": "postgresql://localhost/mydb"} # pgvector automatically registered if available )) diff --git a/docs/reference/base.rst b/docs/reference/base.rst index c7deff8ee..8c8177c33 100644 --- a/docs/reference/base.rst +++ b/docs/reference/base.rst @@ -34,7 +34,7 @@ SQLSpec Registry sql = SQLSpec() config = sql.add_config( AsyncpgConfig( - pool_config={"host": "localhost", "database": "mydb"} + connection_config={"host": "localhost", "database": "mydb"} ) ) @@ -49,7 +49,7 @@ All database adapter configurations inherit from base protocol classes defined i Connection Pooling ================== -Connection pooling is configured via adapter-specific TypedDicts passed to the ``pool_config`` parameter. +Connection pooling is configured via adapter-specific TypedDicts passed to the ``connection_config`` parameter. Session Management ================== @@ -98,7 +98,7 @@ Manual lifecycle control: .. code-block:: python sql = SQLSpec() - config = sql.add_config(AsyncpgConfig(pool_config={...})) + config = sql.add_config(AsyncpgConfig(connection_config={...})) # Startup pools explicitly # Pools created lazily on first use @@ -141,7 +141,7 @@ Sessions are provided using the config instance returned from ``add_config``: .. code-block:: python # Config instance IS the handle - config = sql.add_config(AsyncpgConfig(pool_config={...})) + config = sql.add_config(AsyncpgConfig(connection_config={...})) async with sql.provide_session(config) as session: ... 
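Taken together, the `base.rst` changes above amount to the same one-line rename in application code. A minimal sketch of the post-rename flow, assuming a placeholder DSN and pool sizes (none of these values come from this change set):

```python
# Hedged sketch: connection_config replaces pool_config; lazy pool creation,
# session handling, and explicit close are otherwise unchanged.
import asyncio

from sqlspec import SQLSpec
from sqlspec.adapters.asyncpg import AsyncpgConfig


async def main() -> None:
    spec = SQLSpec()
    db = spec.add_config(
        AsyncpgConfig(
            connection_config={  # was pool_config before v0.33.0
                "dsn": "postgresql://user:pass@localhost/db",  # placeholder DSN
                "min_size": 5,
                "max_size": 20,
            }
        )
    )
    # Pools are created lazily on first use; the config instance is the handle.
    async with spec.provide_session(db) as session:
        await session.execute("SELECT 1")
    await db.close_pool()


asyncio.run(main())
```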
diff --git a/docs/reference/extensions.rst b/docs/reference/extensions.rst index 15566dfd8..1c5069acc 100644 --- a/docs/reference/extensions.rst +++ b/docs/reference/extensions.rst @@ -69,7 +69,7 @@ See :doc:`/extensions/adk/index` for comprehensive documentation including: from sqlspec.adapters.asyncpg.adk import AsyncpgADKStore from sqlspec.extensions.adk import SQLSpecSessionService - config = AsyncpgConfig(pool_config={"dsn": "postgresql://..."}) + config = AsyncpgConfig(connection_config={"dsn": "postgresql://..."}) store = AsyncpgADKStore(config) await store.create_tables() @@ -147,7 +147,7 @@ Plugin sql = SQLSpec() db = sql.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/db"} + connection_config={"dsn": "postgresql://localhost/db"} ) ) diff --git a/docs/usage/drivers_and_querying.rst b/docs/usage/drivers_and_querying.rst index 621c03b55..1bbf9a1d1 100644 --- a/docs/usage/drivers_and_querying.rst +++ b/docs/usage/drivers_and_querying.rst @@ -255,7 +255,7 @@ Google Cloud BigQuery for large-scale analytics. from sqlspec.adapters.bigquery import BigQueryConfig config = BigQueryConfig( - pool_config={ + connection_config={ "project": "my-project", "credentials": credentials_object, } @@ -287,7 +287,7 @@ Globally distributed, horizontally scalable database with strong consistency. from sqlspec.adapters.spanner import SpannerSyncConfig config = SpannerSyncConfig( - pool_config={ + connection_config={ "project": "my-project", "instance": "my-instance", "database": "my-database", diff --git a/docs/usage/framework_integrations.rst b/docs/usage/framework_integrations.rst index 5860fd6f6..9ae4b7362 100644 --- a/docs/usage/framework_integrations.rst +++ b/docs/usage/framework_integrations.rst @@ -34,7 +34,7 @@ Basic Setup spec = SQLSpec() db = spec.add_config( AsyncpgConfig( - pool_config={ + connection_config={ "dsn": "postgresql://localhost/mydb", "min_size": 10, "max_size": 20, @@ -103,7 +103,7 @@ You control transaction boundaries explicitly: spec = SQLSpec() db = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, extension_config={ "litestar": {"commit_mode": "manual"} # Default } @@ -132,7 +132,7 @@ Automatically commits on successful requests (2xx responses): spec = SQLSpec() db = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, extension_config={ "litestar": {"commit_mode": "autocommit"} # Auto-commit on 2xx } @@ -162,7 +162,7 @@ Commits on both 2xx and 3xx responses: spec = SQLSpec() db = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, extension_config={ "litestar": {"commit_mode": "autocommit_include_redirect"} } @@ -183,7 +183,7 @@ Customize the dependency injection keys via ``extension_config``: spec = SQLSpec() db = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, extension_config={ "litestar": { "connection_key": "database", # Default: "db_connection" @@ -216,7 +216,7 @@ The plugin supports multiple database configurations through a single SQLSpec in # Main database main_db = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/main"}, + connection_config={"dsn": "postgresql://localhost/main"}, extension_config={ "litestar": { "session_key": "main_db", @@ -229,7 +229,7 @@ The plugin supports multiple database configurations through a 
single SQLSpec in # Analytics database analytics_db = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/analytics"}, + connection_config={"dsn": "postgresql://localhost/analytics"}, extension_config={ "litestar": { "session_key": "analytics_db", @@ -275,7 +275,7 @@ Use SQLSpec as a session backend for Litestar: spec = SQLSpec() db = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/db"}, + connection_config={"dsn": "postgresql://localhost/db"}, extension_config={ "litestar": {"session_table": "litestar_sessions"} }, @@ -336,7 +336,7 @@ Enable request correlation tracking via ``extension_config``: spec = SQLSpec() db = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, extension_config={ "litestar": { "enable_correlation_middleware": True, # Default: True @@ -371,7 +371,7 @@ Basic Setup spec = SQLSpec() db = spec.add_config( AsyncpgConfig( - pool_config={ + connection_config={ "dsn": "postgresql://localhost/mydb", "min_size": 10, "max_size": 20, @@ -452,10 +452,10 @@ Support multiple databases with different dependencies: .. code-block:: python # Main database - main_db = spec.add_config(AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/main"})) + main_db = spec.add_config(AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/main"})) # Analytics database - analytics_db = spec.add_config(AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/analytics"})) + analytics_db = spec.add_config(AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/analytics"})) # Dependency functions async def get_main_db(): @@ -497,7 +497,7 @@ Basic Setup # Initialize SQLSpec spec = SQLSpec() - db = spec.add_config(AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/db"})) + db = spec.add_config(AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/db"})) # Store in app context app.ctx.sqlspec = spec @@ -562,7 +562,7 @@ Basic Setup # Initialize SQLSpec spec = SQLSpec() - db = spec.add_config(SqliteConfig(pool_config={"database": "app.db"})) + db = spec.add_config(SqliteConfig(connection_config={"database": "app.db"})) Using Request Context ^^^^^^^^^^^^^^^^^^^^^ @@ -657,7 +657,7 @@ For simple applications with a single database: cls._instance = super().__new__(cls) cls._spec = SQLSpec() cls._config = cls._spec.add_config( - AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/db"}) + AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/db"}) ) return cls._instance @@ -678,7 +678,7 @@ Best Practices # Prefer Litestar plugin over manual setup spec = SQLSpec() - db = spec.add_config(AsyncpgConfig(pool_config={"dsn": "postgresql://..."})) + db = spec.add_config(AsyncpgConfig(connection_config={"dsn": "postgresql://..."})) app = Litestar(plugins=[SQLSpecPlugin(sqlspec=spec)]) **2. 
Always Clean Up Pools** @@ -713,7 +713,7 @@ Best Practices spec = SQLSpec() db = spec.add_config( AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, extension_config={ "litestar": {"commit_mode": "autocommit"} } @@ -764,7 +764,7 @@ Testing with Framework Integration @pytest.fixture async def test_db(): spec = SQLSpec() - db = spec.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + db = spec.add_config(SqliteConfig(connection_config={"database": ":memory:"})) async with spec.provide_session(db) as session: # Set up test schema diff --git a/specs/guides/patterns/README.md b/specs/guides/patterns/README.md index 74add096a..fe0942f83 100644 --- a/specs/guides/patterns/README.md +++ b/specs/guides/patterns/README.md @@ -54,7 +54,7 @@ The PRD agent: Cross-adapter implementation patterns that apply to all database adapters: -- **Configuration Pattern**: pool_config TypedDict, driver_features, bind_key +- **Configuration Pattern**: connection_config TypedDict, driver_features, bind_key - **Type Handler Pattern**: Input/output type handlers, graceful degradation - **Exception Handling Pattern**: wrap_exceptions, SQLSpec exception hierarchy - **Connection Lifecycle Pattern**: provide_connection, provide_session, pool management diff --git a/specs/guides/patterns/adapter-patterns.md b/specs/guides/patterns/adapter-patterns.md index 23623fcfc..c888f7331 100644 --- a/specs/guides/patterns/adapter-patterns.md +++ b/specs/guides/patterns/adapter-patterns.md @@ -46,13 +46,13 @@ Each database library has its own configuration style, parameter names, and defa Use TypedDict for strongly-typed configuration with three-tier structure: 1. **ConnectionConfig**: Basic connection parameters 2. **PoolConfig**: Connection pool settings (inherits ConnectionConfig) -3. **DatabaseConfig**: SQLSpec wrapper with pool_config, driver_features, bind_key +3. **DatabaseConfig**: SQLSpec wrapper with connection_config, driver_features, bind_key **Key principles**: 1. Use TypedDict with NotRequired fields for optional parameters 2. Inherit PoolConfig from ConnectionConfig to DRY -3. Provide explicit pool_config parameter (dict or TypedDict) -4. Support pool_instance for pre-configured pools +3. Provide explicit connection_config parameter (dict or TypedDict) +4. Support connection_instance for pre-configured pools 5. Include bind_key for multi-database support 6. Use extension_config for framework-specific settings @@ -128,8 +128,8 @@ class AsyncpgConfig(AsyncDatabaseConfig[AsyncpgConnection, Pool, AsyncpgDriver]) def __init__( self, *, - pool_config: "AsyncpgPoolConfig | dict[str, Any] | None" = None, - pool_instance: "Pool | None" = None, + connection_config: "AsyncpgPoolConfig | dict[str, Any] | None" = None, + connection_instance: "Pool | None" = None, migration_config: "dict[str, Any] | None" = None, statement_config: "StatementConfig | None" = None, driver_features: "AsyncpgDriverFeatures | dict[str, Any] | None" = None, @@ -139,8 +139,8 @@ class AsyncpgConfig(AsyncDatabaseConfig[AsyncpgConnection, Pool, AsyncpgDriver]) """Initialize AsyncPG configuration. 
Args: - pool_config: Pool configuration (TypedDict or dict) - pool_instance: Existing pool to use + connection_config: Pool configuration (TypedDict or dict) + connection_instance: Existing pool to use migration_config: Migration settings statement_config: Statement processing overrides driver_features: Feature flags (TypedDict or dict) @@ -153,8 +153,8 @@ class AsyncpgConfig(AsyncDatabaseConfig[AsyncpgConnection, Pool, AsyncpgDriver]) features_dict.setdefault("enable_pgvector", PGVECTOR_INSTALLED) super().__init__( - pool_config=dict(pool_config) if pool_config else {}, - pool_instance=pool_instance, + connection_config=dict(connection_config) if connection_config else {}, + connection_instance=connection_instance, migration_config=migration_config, statement_config=statement_config, driver_features=features_dict, @@ -214,18 +214,18 @@ class OracleSyncConfig(SyncDatabaseConfig[OracleSyncConnection, OracleSyncConnec def _create_pool(self) -> "OracleSyncConnectionPool": """Create sync connection pool.""" - return oracledb.create_pool(**self.pool_config) + return oracledb.create_pool(**self.connection_config) @contextlib.contextmanager def provide_connection(self) -> "Generator[OracleSyncConnection, None, None]": """Provide sync connection.""" - if self.pool_instance is None: - self.pool_instance = self.create_pool() - conn = self.pool_instance.acquire() + if self.connection_instance is None: + self.connection_instance = self.create_pool() + conn = self.connection_instance.acquire() try: yield conn finally: - self.pool_instance.release(conn) + self.connection_instance.release(conn) ``` #### Variation 2: External Connector (Cloud SQL, AlloyDB) @@ -560,7 +560,7 @@ class OracleSyncConfig(SyncDatabaseConfig): def _create_pool(self) -> "OracleSyncConnectionPool": """Create pool with session callback.""" - config = dict(self.pool_config) + config = dict(self.connection_config) # Register session callback if any handlers enabled if self.driver_features.get("enable_numpy_vectors", False): @@ -961,11 +961,11 @@ Use context managers for automatic resource cleanup with try/finally patterns. L **Implementation steps**: 1. Implement `_create_pool()` (private, actual creation) -2. Implement `create_pool()` (public, sets pool_instance) +2. Implement `create_pool()` (public, sets connection_instance) 3. Implement `provide_connection()` context manager 4. Implement `provide_session()` context manager (wraps connection) 5. Implement `_close_pool()` for cleanup -6. Set pool_instance to None after close +6. 
Set connection_instance to None after close ### Code Example @@ -976,31 +976,31 @@ from contextlib import asynccontextmanager class MinimalAsyncConfig: def __init__(self): - self.pool_instance = None + self.connection_instance = None async def _create_pool(self): """Create the actual pool.""" - return await library.create_pool(**self.pool_config) + return await library.create_pool(**self.connection_config) async def create_pool(self): """Public pool creation.""" - if self.pool_instance is None: - self.pool_instance = await self._create_pool() - return self.pool_instance + if self.connection_instance is None: + self.connection_instance = await self._create_pool() + return self.connection_instance @asynccontextmanager async def provide_connection(self): """Provide connection with automatic cleanup.""" - if self.pool_instance is None: - self.pool_instance = await self._create_pool() + if self.connection_instance is None: + self.connection_instance = await self._create_pool() connection = None try: - connection = await self.pool_instance.acquire() + connection = await self.connection_instance.acquire() yield connection finally: if connection is not None: - await self.pool_instance.release(connection) + await self.connection_instance.release(connection) ``` #### Full Example (AsyncPG) @@ -1015,10 +1015,10 @@ if TYPE_CHECKING: class AsyncpgConfig(AsyncDatabaseConfig): - def __init__(self, *, pool_config=None, pool_instance=None, ...): + def __init__(self, *, connection_config=None, connection_instance=None, ...): super().__init__( - pool_config=dict(pool_config) if pool_config else {}, - pool_instance=pool_instance, + connection_config=dict(connection_config) if connection_config else {}, + connection_instance=connection_instance, ... ) @@ -1028,7 +1028,7 @@ class AsyncpgConfig(AsyncDatabaseConfig): Returns: AsyncPG connection pool instance. """ - config = self._get_pool_config_dict() + config = self._get_connection_config_dict() config.setdefault("init", self._init_connection) return await asyncpg_create_pool(**config) @@ -1038,15 +1038,15 @@ class AsyncpgConfig(AsyncDatabaseConfig): Returns: AsyncPG connection pool. """ - if self.pool_instance is None: - self.pool_instance = await self._create_pool() - return self.pool_instance + if self.connection_instance is None: + self.connection_instance = await self._create_pool() + return self.connection_instance async def _close_pool(self) -> None: """Close the actual async connection pool.""" - if self.pool_instance: - await self.pool_instance.close() - self.pool_instance = None + if self.connection_instance: + await self.connection_instance.close() + self.connection_instance = None async def close_pool(self) -> None: """Public close method.""" @@ -1068,16 +1068,16 @@ class AsyncpgConfig(AsyncDatabaseConfig): Yields: AsyncPG connection instance. 
""" - if self.pool_instance is None: - self.pool_instance = await self._create_pool() + if self.connection_instance is None: + self.connection_instance = await self._create_pool() connection = None try: - connection = await self.pool_instance.acquire() + connection = await self.connection_instance.acquire() yield connection finally: if connection is not None: - await self.pool_instance.release(connection) + await self.connection_instance.release(connection) @asynccontextmanager async def provide_session( @@ -1117,7 +1117,7 @@ from typing import Generator class OracleSyncConfig(SyncDatabaseConfig): def _create_pool(self) -> "OracleSyncConnectionPool": """Create sync connection pool.""" - config = dict(self.pool_config) + config = dict(self.connection_config) # Add session callback if handlers enabled if self.driver_features.get("enable_numpy_vectors", False): @@ -1127,15 +1127,15 @@ class OracleSyncConfig(SyncDatabaseConfig): def create_pool(self) -> "OracleSyncConnectionPool": """Public pool creation.""" - if self.pool_instance is None: - self.pool_instance = self._create_pool() - return self.pool_instance + if self.connection_instance is None: + self.connection_instance = self._create_pool() + return self.connection_instance def _close_pool(self) -> None: """Close sync pool.""" - if self.pool_instance: - self.pool_instance.close() - self.pool_instance = None + if self.connection_instance: + self.connection_instance.close() + self.connection_instance = None @contextlib.contextmanager def provide_connection(self) -> "Generator[OracleSyncConnection, None, None]": @@ -1144,16 +1144,16 @@ class OracleSyncConfig(SyncDatabaseConfig): Yields: Oracle Connection instance. """ - if self.pool_instance is None: - self.pool_instance = self._create_pool() + if self.connection_instance is None: + self.connection_instance = self._create_pool() conn = None try: - conn = self.pool_instance.acquire() + conn = self.connection_instance.acquire() yield conn finally: if conn is not None: - self.pool_instance.release(conn) + self.connection_instance.release(conn) @contextlib.contextmanager def provide_session( @@ -1181,24 +1181,24 @@ class OracleSyncConfig(SyncDatabaseConfig): ```python # BAD - No finally block async def provide_connection(self): - if not self.pool_instance: - self.pool_instance = await self._create_pool() - connection = await self.pool_instance.acquire() + if not self.connection_instance: + self.connection_instance = await self._create_pool() + connection = await self.connection_instance.acquire() yield connection - await self.pool_instance.release(connection) # Skipped if exception! + await self.connection_instance.release(connection) # Skipped if exception! # BAD - Creating pool in __init__ class BadConfig: - def __init__(self, pool_config): + def __init__(self, connection_config): # Pool created even if never used! - self.pool_instance = asyncio.run(create_pool(**pool_config)) + self.connection_instance = asyncio.run(create_pool(**connection_config)) # BAD - Manual connection management -connection = await config.pool_instance.acquire() +connection = await config.connection_instance.acquire() try: result = await connection.fetch(sql) finally: - await config.pool_instance.release(connection) + await config.connection_instance.release(connection) # Verbose and error-prone # GOOD - Context manager @@ -1220,9 +1220,9 @@ async def provide_pool(self) -> "Pool": Returns: Connection pool instance. 
""" - if not self.pool_instance: - self.pool_instance = await self.create_pool() - return self.pool_instance + if not self.connection_instance: + self.connection_instance = await self.create_pool() + return self.connection_instance ``` #### Variation 2: External Connector Cleanup @@ -1232,9 +1232,9 @@ For Google Cloud connectors requiring cleanup: ```python async def _close_pool(self) -> None: """Close pool and cleanup connectors.""" - if self.pool_instance: - await self.pool_instance.close() - self.pool_instance = None + if self.connection_instance: + await self.connection_instance.close() + self.connection_instance = None # Cleanup Cloud SQL connector if self._cloud_sql_connector is not None: @@ -1281,7 +1281,7 @@ async def _close_pool(self) -> None: **When NOT to use this pattern**: - Required features (put in config directly) -- Pool configuration (use pool_config) +- Pool configuration (use connection_config) - Statement-level overrides (use statement_config) ### Problem @@ -1487,25 +1487,25 @@ class OracleSyncConfig(SyncDatabaseConfig): ```python # Auto-detect (recommended) config = AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, # driver_features automatically enables pgvector if installed ) # Explicit disable config = AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, driver_features={"enable_pgvector": False} # Disable even if installed ) # Explicit enable (fails if not installed) config = AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, driver_features={"enable_pgvector": True} # Will fail if package missing ) # Custom serializer config = AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, driver_features={ "json_serializer": orjson.dumps, "json_deserializer": orjson.loads, @@ -1514,7 +1514,7 @@ config = AsyncpgConfig( # Cloud SQL connector config = AsyncpgConfig( - pool_config={"user": "myuser", "database": "mydb"}, + connection_config={"user": "myuser", "database": "mydb"}, driver_features={ "enable_cloud_sql": True, "cloud_sql_instance": "project:region:instance", @@ -1528,7 +1528,7 @@ config = AsyncpgConfig( ```python # BAD - Feature flags mixed with pool config class BadConfig: - def __init__(self, pool_config=None, enable_pgvector=True): + def __init__(self, connection_config=None, enable_pgvector=True): # Mixed concerns! 
pass diff --git a/sqlspec/adapters/adbc/config.py b/sqlspec/adapters/adbc/config.py index 5d503d6d8..5838f366b 100644 --- a/sqlspec/adapters/adbc/config.py +++ b/sqlspec/adapters/adbc/config.py @@ -114,6 +114,7 @@ def __init__( self, *, connection_config: AdbcConnectionParams | dict[str, Any] | None = None, + connection_instance: "Any" = None, migration_config: dict[str, Any] | None = None, statement_config: StatementConfig | None = None, driver_features: "AdbcDriverFeatures | dict[str, Any] | None" = None, @@ -125,6 +126,7 @@ def __init__( Args: connection_config: Connection configuration parameters + connection_instance: Pre-created connection instance to use instead of creating a new one migration_config: Migration configuration statement_config: Default SQL statement configuration driver_features: Driver feature configuration (AdbcDriverFeatures) @@ -167,6 +169,7 @@ def __init__( super().__init__( connection_config=self.connection_config, + connection_instance=connection_instance, migration_config=migration_config, statement_config=statement_config, driver_features=processed_driver_features, diff --git a/sqlspec/adapters/aiosqlite/adk/store.py b/sqlspec/adapters/aiosqlite/adk/store.py index ab31a7b12..bf454daec 100644 --- a/sqlspec/adapters/aiosqlite/adk/store.py +++ b/sqlspec/adapters/aiosqlite/adk/store.py @@ -101,7 +101,7 @@ class AiosqliteADKStore(BaseAsyncADKStore["AiosqliteConfig"]): from sqlspec.adapters.aiosqlite.adk import AiosqliteADKStore config = AiosqliteConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, extension_config={ "adk": { "session_table": "my_sessions", diff --git a/sqlspec/adapters/aiosqlite/config.py b/sqlspec/adapters/aiosqlite/config.py index 68befc31e..5ec7a6bc0 100644 --- a/sqlspec/adapters/aiosqlite/config.py +++ b/sqlspec/adapters/aiosqlite/config.py @@ -89,8 +89,8 @@ class AiosqliteConfig(AsyncDatabaseConfig["AiosqliteConnection", AiosqliteConnec def __init__( self, *, - pool_config: "AiosqlitePoolParams | dict[str, Any] | None" = None, - pool_instance: "AiosqliteConnectionPool | None" = None, + connection_config: "AiosqlitePoolParams | dict[str, Any] | None" = None, + connection_instance: "AiosqliteConnectionPool | None" = None, migration_config: "dict[str, Any] | None" = None, statement_config: "StatementConfig | None" = None, driver_features: "AiosqliteDriverFeatures | dict[str, Any] | None" = None, @@ -101,8 +101,8 @@ def __init__( """Initialize AioSQLite configuration. Args: - pool_config: Pool configuration parameters (TypedDict or dict) - pool_instance: Optional pre-configured connection pool instance. + connection_config: Connection and pool configuration parameters (TypedDict or dict) + connection_instance: Optional pre-configured connection pool instance. migration_config: Optional migration configuration. statement_config: Optional statement configuration. driver_features: Optional driver feature configuration. 
@@ -110,7 +110,7 @@ def __init__( extension_config: Extension-specific configuration (e.g., Litestar plugin settings) observability_config: Adapter-level observability overrides for lifecycle hooks and observers """ - config_dict = dict(pool_config) if pool_config else {} + config_dict = dict(connection_config) if connection_config else {} if "database" not in config_dict or config_dict["database"] == ":memory:": config_dict["database"] = "file::memory:?cache=shared" @@ -138,8 +138,8 @@ def __init__( base_statement_config = base_statement_config.replace(parameter_config=parameter_config) super().__init__( - pool_config=config_dict, - pool_instance=pool_instance, + connection_config=config_dict, + connection_instance=connection_instance, migration_config=migration_config, statement_config=base_statement_config, driver_features=processed_driver_features, @@ -154,7 +154,7 @@ def _get_pool_config_dict(self) -> "dict[str, Any]": Returns: Dictionary with pool parameters, filtering out None values. """ - config: dict[str, Any] = dict(self.pool_config) + config: dict[str, Any] = dict(self.connection_config) extras = config.pop("extra", {}) config.update(extras) return {k: v for k, v in config.items() if v is not None} @@ -177,7 +177,7 @@ def _get_connection_config_dict(self) -> "dict[str, Any]": "pool_timeout", "pool_recycle_seconds", } - return {k: v for k, v in self.pool_config.items() if k not in excluded_keys} + return {k: v for k, v in self.connection_config.items() if k not in excluded_keys} @asynccontextmanager async def provide_connection(self, *args: Any, **kwargs: Any) -> "AsyncGenerator[AiosqliteConnection, None]": @@ -190,9 +190,9 @@ async def provide_connection(self, *args: Any, **kwargs: Any) -> "AsyncGenerator Yields: An aiosqlite connection instance. """ - if self.pool_instance is None: - self.pool_instance = await self._create_pool() - async with self.pool_instance.get_connection() as connection: + if self.connection_instance is None: + self.connection_instance = await self._create_pool() + async with self.connection_instance.get_connection() as connection: yield connection @asynccontextmanager @@ -259,8 +259,8 @@ def _register_type_adapters(self) -> None: async def close_pool(self) -> None: """Close the connection pool.""" - if self.pool_instance and not self.pool_instance.is_closed: - await self.pool_instance.close() + if self.connection_instance and not self.connection_instance.is_closed: + await self.connection_instance.close() async def create_connection(self) -> "AiosqliteConnection": """Create a single async connection from the pool. @@ -268,9 +268,9 @@ async def create_connection(self) -> "AiosqliteConnection": Returns: An aiosqlite connection instance. """ - if self.pool_instance is None: - self.pool_instance = await self._create_pool() - pool_connection = await self.pool_instance.acquire() + if self.connection_instance is None: + self.connection_instance = await self._create_pool() + pool_connection = await self.connection_instance.acquire() return pool_connection.connection async def provide_pool(self) -> AiosqliteConnectionPool: @@ -279,9 +279,9 @@ async def provide_pool(self) -> AiosqliteConnectionPool: Returns: The async connection pool. 
""" - if not self.pool_instance: - self.pool_instance = await self.create_pool() - return self.pool_instance + if not self.connection_instance: + self.connection_instance = await self.create_pool() + return self.connection_instance def get_signature_namespace(self) -> "dict[str, Any]": """Get the signature namespace for aiosqlite types. diff --git a/sqlspec/adapters/asyncmy/adk/store.py b/sqlspec/adapters/asyncmy/adk/store.py index 6ac1f2e14..18b87a54c 100644 --- a/sqlspec/adapters/asyncmy/adk/store.py +++ b/sqlspec/adapters/asyncmy/adk/store.py @@ -39,7 +39,7 @@ class AsyncmyADKStore(BaseAsyncADKStore["AsyncmyConfig"]): from sqlspec.adapters.asyncmy.adk import AsyncmyADKStore config = AsyncmyConfig( - pool_config={"host": "localhost", ...}, + connection_config={"host": "localhost", ...}, extension_config={ "adk": { "session_table": "my_sessions", diff --git a/sqlspec/adapters/asyncmy/config.py b/sqlspec/adapters/asyncmy/config.py index c4c51cf06..d5f94b0a8 100644 --- a/sqlspec/adapters/asyncmy/config.py +++ b/sqlspec/adapters/asyncmy/config.py @@ -99,8 +99,8 @@ class AsyncmyConfig(AsyncDatabaseConfig[AsyncmyConnection, "AsyncmyPool", Asyncm def __init__( self, *, - pool_config: "AsyncmyPoolParams | dict[str, Any] | None" = None, - pool_instance: "AsyncmyPool | None" = None, + connection_config: "AsyncmyPoolParams | dict[str, Any] | None" = None, + connection_instance: "AsyncmyPool | None" = None, migration_config: dict[str, Any] | None = None, statement_config: "StatementConfig | None" = None, driver_features: "AsyncmyDriverFeatures | dict[str, Any] | None" = None, @@ -111,8 +111,8 @@ def __init__( """Initialize Asyncmy configuration. Args: - pool_config: Pool configuration parameters - pool_instance: Existing pool instance to use + connection_config: Connection and pool configuration parameters + connection_instance: Existing pool instance to use migration_config: Migration configuration statement_config: Statement configuration override driver_features: Driver feature configuration (TypedDict or dict) @@ -120,13 +120,13 @@ def __init__( extension_config: Extension-specific configuration (e.g., Litestar plugin settings) observability_config: Adapter-level observability overrides for lifecycle hooks and observers """ - processed_pool_config: dict[str, Any] = dict(pool_config) if pool_config else {} - if "extra" in processed_pool_config: - extras = processed_pool_config.pop("extra") - processed_pool_config.update(extras) + processed_connection_config: dict[str, Any] = dict(connection_config) if connection_config else {} + if "extra" in processed_connection_config: + extras = processed_connection_config.pop("extra") + processed_connection_config.update(extras) - processed_pool_config.setdefault("host", "localhost") - processed_pool_config.setdefault("port", 3306) + processed_connection_config.setdefault("host", "localhost") + processed_connection_config.setdefault("port", 3306) processed_driver_features: dict[str, Any] = dict(driver_features) if driver_features else {} serializer = processed_driver_features.setdefault("json_serializer", to_json) @@ -137,8 +137,8 @@ def __init__( ) super().__init__( - pool_config=processed_pool_config, - pool_instance=pool_instance, + connection_config=processed_connection_config, + connection_instance=connection_instance, migration_config=migration_config, statement_config=base_statement_config, driver_features=processed_driver_features, @@ -157,12 +157,12 @@ async def _create_pool(self) -> "AsyncmyPool": # pyright: ignore Future driver_features can be 
added here if needed (e.g., custom connection initialization, specialized type handling). """ - return await asyncmy.create_pool(**dict(self.pool_config)) # pyright: ignore + return await asyncmy.create_pool(**dict(self.connection_config)) # pyright: ignore async def _close_pool(self) -> None: """Close the actual async connection pool.""" - if self.pool_instance: - self.pool_instance.close() + if self.connection_instance: + self.connection_instance.close() async def close_pool(self) -> None: """Close the connection pool.""" @@ -174,9 +174,9 @@ async def create_connection(self) -> AsyncmyConnection: # pyright: ignore Returns: An Asyncmy connection instance. """ - if self.pool_instance is None: - self.pool_instance = await self.create_pool() - return await self.pool_instance.acquire() # pyright: ignore + if self.connection_instance is None: + self.connection_instance = await self.create_pool() + return await self.connection_instance.acquire() # pyright: ignore @asynccontextmanager async def provide_connection(self, *args: Any, **kwargs: Any) -> AsyncGenerator[AsyncmyConnection, None]: # pyright: ignore @@ -189,9 +189,9 @@ async def provide_connection(self, *args: Any, **kwargs: Any) -> AsyncGenerator[ Yields: An Asyncmy connection instance. """ - if self.pool_instance is None: - self.pool_instance = await self.create_pool() - async with self.pool_instance.acquire() as connection: # pyright: ignore + if self.connection_instance is None: + self.connection_instance = await self.create_pool() + async with self.connection_instance.acquire() as connection: # pyright: ignore yield connection @asynccontextmanager @@ -221,9 +221,9 @@ async def provide_pool(self, *args: Any, **kwargs: Any) -> "Pool": # pyright: i Returns: The async connection pool. """ - if not self.pool_instance: - self.pool_instance = await self.create_pool() - return self.pool_instance + if not self.connection_instance: + self.connection_instance = await self.create_pool() + return self.connection_instance def get_signature_namespace(self) -> "dict[str, Any]": """Get the signature namespace for Asyncmy types. diff --git a/sqlspec/adapters/asyncmy/litestar/store.py b/sqlspec/adapters/asyncmy/litestar/store.py index ee5289ffe..fc6adab8b 100644 --- a/sqlspec/adapters/asyncmy/litestar/store.py +++ b/sqlspec/adapters/asyncmy/litestar/store.py @@ -34,7 +34,7 @@ class AsyncmyStore(BaseSQLSpecStore["AsyncmyConfig"]): from sqlspec.adapters.asyncmy import AsyncmyConfig from sqlspec.adapters.asyncmy.litestar.store import AsyncmyStore - config = AsyncmyConfig(pool_config={"host": "localhost", ...}) + config = AsyncmyConfig(connection_config={"host": "localhost", ...}) store = AsyncmyStore(config) await store.create_table() diff --git a/sqlspec/adapters/asyncpg/adk/store.py b/sqlspec/adapters/asyncpg/adk/store.py index 8dbac95d8..e8a34cf1a 100644 --- a/sqlspec/adapters/asyncpg/adk/store.py +++ b/sqlspec/adapters/asyncpg/adk/store.py @@ -43,7 +43,7 @@ class AsyncpgADKStore(BaseAsyncADKStore[AsyncConfigT]): from sqlspec.adapters.asyncpg.adk import AsyncpgADKStore config = AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, extension_config={ "adk": { "session_table": "my_sessions", diff --git a/sqlspec/adapters/asyncpg/config.py b/sqlspec/adapters/asyncpg/config.py index 21c95d7ff..a93d0a67f 100644 --- a/sqlspec/adapters/asyncpg/config.py +++ b/sqlspec/adapters/asyncpg/config.py @@ -102,7 +102,7 @@ class AsyncpgDriverFeatures(TypedDict): Required when enable_cloud_sql is True. 
cloud_sql_enable_iam_auth: Enable IAM database authentication. Defaults to False for passwordless authentication. - When False, requires user/password in pool_config. + When False, requires user/password in connection_config. cloud_sql_ip_type: IP address type for connection. Options: "PUBLIC", "PRIVATE", "PSC" Defaults to "PRIVATE". @@ -149,8 +149,8 @@ class AsyncpgConfig(AsyncDatabaseConfig[AsyncpgConnection, "Pool[Record]", Async def __init__( self, *, - pool_config: "AsyncpgPoolConfig | dict[str, Any] | None" = None, - pool_instance: "Pool[Record] | None" = None, + connection_config: "AsyncpgPoolConfig | dict[str, Any] | None" = None, + connection_instance: "Pool[Record] | None" = None, migration_config: "dict[str, Any] | None" = None, statement_config: "StatementConfig | None" = None, driver_features: "AsyncpgDriverFeatures | dict[str, Any] | None" = None, @@ -161,8 +161,8 @@ def __init__( """Initialize AsyncPG configuration. Args: - pool_config: Pool configuration parameters (TypedDict or dict) - pool_instance: Existing pool instance to use + connection_config: Connection and pool configuration parameters (TypedDict or dict) + connection_instance: Existing pool instance to use migration_config: Migration configuration statement_config: Statement configuration override driver_features: Driver features configuration (TypedDict or dict) @@ -184,8 +184,8 @@ def __init__( ) super().__init__( - pool_config=dict(pool_config) if pool_config else {}, - pool_instance=pool_instance, + connection_config=dict(connection_config) if connection_config else {}, + connection_instance=connection_instance, migration_config=migration_config, statement_config=base_statement_config, driver_features=features_dict, @@ -247,7 +247,7 @@ def _get_pool_config_dict(self) -> "dict[str, Any]": Returns: Dictionary with pool parameters, filtering out None values. """ - config: dict[str, Any] = dict(self.pool_config) + config: dict[str, Any] = dict(self.connection_config) extras = config.pop("extra", {}) config.update(extras) return {k: v for k, v in config.items() if v is not None} @@ -357,8 +357,8 @@ async def _init_connection(self, connection: "AsyncpgConnection") -> None: async def _close_pool(self) -> None: """Close the actual async connection pool and cleanup connectors.""" - if self.pool_instance: - await self.pool_instance.close() + if self.connection_instance: + await self.connection_instance.close() if self._cloud_sql_connector is not None: await self._cloud_sql_connector.close_async() @@ -378,9 +378,9 @@ async def create_connection(self) -> "AsyncpgConnection": Returns: An AsyncPG connection instance. """ - if self.pool_instance is None: - self.pool_instance = await self._create_pool() - return await self.pool_instance.acquire() + if self.connection_instance is None: + self.connection_instance = await self._create_pool() + return await self.connection_instance.acquire() @asynccontextmanager async def provide_connection(self, *args: Any, **kwargs: Any) -> "AsyncGenerator[AsyncpgConnection, None]": @@ -393,15 +393,15 @@ async def provide_connection(self, *args: Any, **kwargs: Any) -> "AsyncGenerator Yields: An AsyncPG connection instance. 
""" - if self.pool_instance is None: - self.pool_instance = await self._create_pool() + if self.connection_instance is None: + self.connection_instance = await self._create_pool() connection = None try: - connection = await self.pool_instance.acquire() + connection = await self.connection_instance.acquire() yield connection finally: if connection is not None: - await self.pool_instance.release(connection) + await self.connection_instance.release(connection) @asynccontextmanager async def provide_session( @@ -430,9 +430,9 @@ async def provide_pool(self, *args: Any, **kwargs: Any) -> "Pool[Record]": Returns: The async connection pool. """ - if not self.pool_instance: - self.pool_instance = await self.create_pool() - return self.pool_instance + if not self.connection_instance: + self.connection_instance = await self.create_pool() + return self.connection_instance def get_signature_namespace(self) -> "dict[str, Any]": """Get the signature namespace for AsyncPG types. diff --git a/sqlspec/adapters/asyncpg/litestar/store.py b/sqlspec/adapters/asyncpg/litestar/store.py index aaade1519..e5c403640 100644 --- a/sqlspec/adapters/asyncpg/litestar/store.py +++ b/sqlspec/adapters/asyncpg/litestar/store.py @@ -32,7 +32,7 @@ class AsyncpgStore(BaseSQLSpecStore["AsyncpgConfig"]): from sqlspec.adapters.asyncpg.litestar.store import AsyncpgStore config = AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, extension_config={"litestar": {"session_table": "my_sessions"}} ) store = AsyncpgStore(config) diff --git a/sqlspec/adapters/bigquery/config.py b/sqlspec/adapters/bigquery/config.py index 0dc0fa45c..3ac5cc766 100644 --- a/sqlspec/adapters/bigquery/config.py +++ b/sqlspec/adapters/bigquery/config.py @@ -117,6 +117,7 @@ def __init__( self, *, connection_config: "BigQueryConnectionParams | dict[str, Any] | None" = None, + connection_instance: "Any" = None, migration_config: dict[str, Any] | None = None, statement_config: "StatementConfig | None" = None, driver_features: "BigQueryDriverFeatures | dict[str, Any] | None" = None, @@ -128,6 +129,7 @@ def __init__( Args: connection_config: Connection configuration parameters + connection_instance: Pre-created BigQuery Client instance to use instead of creating new one migration_config: Migration configuration statement_config: Statement configuration override driver_features: BigQuery-specific driver features @@ -167,6 +169,7 @@ def _wrap_hook(context: dict[str, Any]) -> None: super().__init__( connection_config=self.connection_config, + connection_instance=connection_instance, migration_config=migration_config, statement_config=base_statement_config, driver_features=processed_driver_features, diff --git a/sqlspec/adapters/duckdb/config.py b/sqlspec/adapters/duckdb/config.py index 59c1ad492..102ad7c33 100644 --- a/sqlspec/adapters/duckdb/config.py +++ b/sqlspec/adapters/duckdb/config.py @@ -187,7 +187,7 @@ class DuckDBConfig(SyncDatabaseConfig[DuckDBConnection, DuckDBConnectionPool, Du ... return msgspec.json.encode(obj).decode("utf-8") >>> >>> config = DuckDBConfig( - ... pool_config={"database": ":memory:"}, + ... connection_config={"database": ":memory:"}, ... driver_features={ ... "json_serializer": custom_json, ... 
"enable_uuid_conversion": False, @@ -207,8 +207,8 @@ class DuckDBConfig(SyncDatabaseConfig[DuckDBConnection, DuckDBConnectionPool, Du def __init__( self, *, - pool_config: "DuckDBPoolParams | dict[str, Any] | None" = None, - pool_instance: "DuckDBConnectionPool | None" = None, + connection_config: "DuckDBPoolParams | dict[str, Any] | None" = None, + connection_instance: "DuckDBConnectionPool | None" = None, migration_config: dict[str, Any] | None = None, statement_config: "StatementConfig | None" = None, driver_features: "DuckDBDriverFeatures | dict[str, Any] | None" = None, @@ -219,8 +219,8 @@ def __init__( """Initialize DuckDB configuration. Args: - pool_config: Pool configuration parameters - pool_instance: Pre-created pool instance + connection_config: Connection and pool configuration parameters + connection_instance: Pre-created pool instance migration_config: Migration configuration statement_config: Statement configuration override driver_features: DuckDB-specific driver features including json_serializer @@ -229,17 +229,17 @@ def __init__( extension_config: Extension-specific configuration (e.g., Litestar plugin settings) observability_config: Adapter-level observability overrides for lifecycle hooks and observers """ - if pool_config is None: - pool_config = {} - pool_config.setdefault("database", ":memory:shared_db") + if connection_config is None: + connection_config = {} + connection_config.setdefault("database", ":memory:shared_db") - if pool_config.get("database") in {":memory:", ""}: - pool_config["database"] = ":memory:shared_db" + if connection_config.get("database") in {":memory:", ""}: + connection_config["database"] = ":memory:shared_db" extension_flags: dict[str, Any] = {} - for key in tuple(pool_config.keys()): + for key in tuple(connection_config.keys()): if key in EXTENSION_FLAG_KEYS: - extension_flags[key] = pool_config.pop(key) # type: ignore[misc] + extension_flags[key] = connection_config.pop(key) # type: ignore[misc] processed_features: dict[str, Any] = dict(driver_features) if driver_features else {} user_connection_hook = cast( @@ -271,8 +271,8 @@ def _wrap_lifecycle_hook(context: dict[str, Any]) -> None: super().__init__( bind_key=bind_key, - pool_config=dict(pool_config), - pool_instance=pool_instance, + connection_config=dict(connection_config), + connection_instance=connection_instance, migration_config=migration_config, statement_config=base_statement_config, driver_features=processed_features, @@ -284,7 +284,7 @@ def _get_connection_config_dict(self) -> "dict[str, Any]": """Get connection configuration as plain dict for pool creation.""" return { k: v - for k, v in self.pool_config.items() + for k, v in self.connection_config.items() if v is not None and k not in {"pool_min_size", "pool_max_size", "pool_timeout", "pool_recycle_seconds", "extra"} } @@ -305,13 +305,13 @@ def _create_pool(self) -> DuckDBConnectionPool: extensions=extensions_dicts, extension_flags=extension_flags_dict, secrets=secrets_dicts, - **self.pool_config, + **self.connection_config, ) def _close_pool(self) -> None: """Close the connection pool.""" - if self.pool_instance: - self.pool_instance.close() + if self.connection_instance: + self.connection_instance.close() def create_connection(self) -> DuckDBConnection: """Get a DuckDB connection from the pool. 
diff --git a/sqlspec/adapters/oracledb/adk/store.py b/sqlspec/adapters/oracledb/adk/store.py index 35eb4d2bd..bd5d91051 100644 --- a/sqlspec/adapters/oracledb/adk/store.py +++ b/sqlspec/adapters/oracledb/adk/store.py @@ -121,7 +121,7 @@ class OracleAsyncADKStore(BaseAsyncADKStore["OracleAsyncConfig"]): from sqlspec.adapters.oracledb.adk import OracleAsyncADKStore config = OracleAsyncConfig( - pool_config={"dsn": "oracle://..."}, + connection_config={"dsn": "oracle://..."}, extension_config={ "adk": { "session_table": "my_sessions", @@ -881,7 +881,7 @@ class OracleSyncADKStore(BaseSyncADKStore["OracleSyncConfig"]): from sqlspec.adapters.oracledb.adk import OracleSyncADKStore config = OracleSyncConfig( - pool_config={"dsn": "oracle://..."}, + connection_config={"dsn": "oracle://..."}, extension_config={ "adk": { "session_table": "my_sessions", diff --git a/sqlspec/adapters/oracledb/config.py b/sqlspec/adapters/oracledb/config.py index 3e0a5d1bb..703d51877 100644 --- a/sqlspec/adapters/oracledb/config.py +++ b/sqlspec/adapters/oracledb/config.py @@ -129,8 +129,8 @@ class OracleSyncConfig(SyncDatabaseConfig[OracleSyncConnection, "OracleSyncConne def __init__( self, *, - pool_config: "OraclePoolParams | dict[str, Any] | None" = None, - pool_instance: "OracleSyncConnectionPool | None" = None, + connection_config: "OraclePoolParams | dict[str, Any] | None" = None, + connection_instance: "OracleSyncConnectionPool | None" = None, migration_config: dict[str, Any] | None = None, statement_config: "StatementConfig | None" = None, driver_features: "OracleDriverFeatures | dict[str, Any] | None" = None, @@ -140,8 +140,8 @@ def __init__( """Initialize Oracle synchronous configuration. Args: - pool_config: Pool configuration parameters. - pool_instance: Existing pool instance to use. + connection_config: Connection and pool configuration parameters. + connection_instance: Existing pool instance to use. migration_config: Migration configuration. statement_config: Default SQL statement configuration. driver_features: Optional driver feature configuration (TypedDict or dict). @@ -149,10 +149,10 @@ def __init__( extension_config: Extension-specific configuration (e.g., Litestar plugin settings). 
""" - processed_pool_config: dict[str, Any] = dict(pool_config) if pool_config else {} - if "extra" in processed_pool_config: - extras = processed_pool_config.pop("extra") - processed_pool_config.update(extras) + processed_connection_config: dict[str, Any] = dict(connection_config) if connection_config else {} + if "extra" in processed_connection_config: + extras = processed_connection_config.pop("extra") + processed_connection_config.update(extras) statement_config = statement_config or oracledb_statement_config processed_driver_features: dict[str, Any] = dict(driver_features) if driver_features else {} @@ -161,8 +161,8 @@ def __init__( processed_driver_features.setdefault("enable_uuid_binary", True) super().__init__( - pool_config=processed_pool_config, - pool_instance=pool_instance, + connection_config=processed_connection_config, + connection_instance=connection_instance, migration_config=migration_config, statement_config=statement_config, driver_features=processed_driver_features, @@ -172,7 +172,7 @@ def __init__( def _create_pool(self) -> "OracleSyncConnectionPool": """Create the actual connection pool.""" - config = dict(self.pool_config) + config = dict(self.connection_config) needs_session_callback = self.driver_features.get("enable_numpy_vectors", False) or self.driver_features.get( "enable_uuid_binary", False @@ -200,8 +200,8 @@ def _init_connection(self, connection: "OracleSyncConnection", tag: str) -> None def _close_pool(self) -> None: """Close the actual connection pool.""" - if self.pool_instance: - self.pool_instance.close() + if self.connection_instance: + self.connection_instance.close() def create_connection(self) -> "OracleSyncConnection": """Create a single connection (not from pool). @@ -209,9 +209,9 @@ def create_connection(self) -> "OracleSyncConnection": Returns: An Oracle Connection instance. """ - if self.pool_instance is None: - self.pool_instance = self.create_pool() - return self.pool_instance.acquire() + if self.connection_instance is None: + self.connection_instance = self.create_pool() + return self.connection_instance.acquire() @contextlib.contextmanager def provide_connection(self) -> "Generator[OracleSyncConnection, None, None]": @@ -220,13 +220,13 @@ def provide_connection(self) -> "Generator[OracleSyncConnection, None, None]": Yields: An Oracle Connection instance. """ - if self.pool_instance is None: - self.pool_instance = self.create_pool() - conn = self.pool_instance.acquire() + if self.connection_instance is None: + self.connection_instance = self.create_pool() + conn = self.connection_instance.acquire() try: yield conn finally: - self.pool_instance.release(conn) + self.connection_instance.release(conn) @contextlib.contextmanager def provide_session( @@ -257,9 +257,9 @@ def provide_pool(self) -> "OracleSyncConnectionPool": Returns: The connection pool. """ - if not self.pool_instance: - self.pool_instance = self.create_pool() - return self.pool_instance + if not self.connection_instance: + self.connection_instance = self.create_pool() + return self.connection_instance def get_signature_namespace(self) -> "dict[str, Any]": """Get the signature namespace for OracleDB types. 
@@ -306,8 +306,8 @@ class OracleAsyncConfig(AsyncDatabaseConfig[OracleAsyncConnection, "OracleAsyncC def __init__( self, *, - pool_config: "OraclePoolParams | dict[str, Any] | None" = None, - pool_instance: "OracleAsyncConnectionPool | None" = None, + connection_config: "OraclePoolParams | dict[str, Any] | None" = None, + connection_instance: "OracleAsyncConnectionPool | None" = None, migration_config: dict[str, Any] | None = None, statement_config: "StatementConfig | None" = None, driver_features: "OracleDriverFeatures | dict[str, Any] | None" = None, @@ -317,8 +317,8 @@ def __init__( """Initialize Oracle asynchronous configuration. Args: - pool_config: Pool configuration parameters. - pool_instance: Existing pool instance to use. + connection_config: Connection and pool configuration parameters. + connection_instance: Existing pool instance to use. migration_config: Migration configuration. statement_config: Default SQL statement configuration. driver_features: Optional driver feature configuration (TypedDict or dict). @@ -326,10 +326,10 @@ def __init__( extension_config: Extension-specific configuration (e.g., Litestar plugin settings). """ - processed_pool_config: dict[str, Any] = dict(pool_config) if pool_config else {} - if "extra" in processed_pool_config: - extras = processed_pool_config.pop("extra") - processed_pool_config.update(extras) + processed_connection_config: dict[str, Any] = dict(connection_config) if connection_config else {} + if "extra" in processed_connection_config: + extras = processed_connection_config.pop("extra") + processed_connection_config.update(extras) processed_driver_features: dict[str, Any] = dict(driver_features) if driver_features else {} processed_driver_features.setdefault("enable_numpy_vectors", NUMPY_INSTALLED) @@ -337,8 +337,8 @@ def __init__( processed_driver_features.setdefault("enable_uuid_binary", True) super().__init__( - pool_config=processed_pool_config, - pool_instance=pool_instance, + connection_config=processed_connection_config, + connection_instance=connection_instance, migration_config=migration_config, statement_config=statement_config or oracledb_statement_config, driver_features=processed_driver_features, @@ -348,7 +348,7 @@ def __init__( async def _create_pool(self) -> "OracleAsyncConnectionPool": """Create the actual async connection pool.""" - config = dict(self.pool_config) + config = dict(self.connection_config) needs_session_callback = self.driver_features.get("enable_numpy_vectors", False) or self.driver_features.get( "enable_uuid_binary", False @@ -376,8 +376,8 @@ async def _init_connection(self, connection: "OracleAsyncConnection", tag: str) async def _close_pool(self) -> None: """Close the actual async connection pool.""" - if self.pool_instance: - await self.pool_instance.close() + if self.connection_instance: + await self.connection_instance.close() async def close_pool(self) -> None: """Close the connection pool.""" @@ -389,9 +389,9 @@ async def create_connection(self) -> OracleAsyncConnection: Returns: An Oracle AsyncConnection instance. 
""" - if self.pool_instance is None: - self.pool_instance = await self.create_pool() - return cast("OracleAsyncConnection", await self.pool_instance.acquire()) + if self.connection_instance is None: + self.connection_instance = await self.create_pool() + return cast("OracleAsyncConnection", await self.connection_instance.acquire()) @asynccontextmanager async def provide_connection(self) -> "AsyncGenerator[OracleAsyncConnection, None]": @@ -400,13 +400,13 @@ async def provide_connection(self) -> "AsyncGenerator[OracleAsyncConnection, Non Yields: An Oracle AsyncConnection instance. """ - if self.pool_instance is None: - self.pool_instance = await self.create_pool() - conn = await self.pool_instance.acquire() + if self.connection_instance is None: + self.connection_instance = await self.create_pool() + conn = await self.connection_instance.acquire() try: yield conn finally: - await self.pool_instance.release(conn) + await self.connection_instance.release(conn) @asynccontextmanager async def provide_session( @@ -437,9 +437,9 @@ async def provide_pool(self) -> "OracleAsyncConnectionPool": Returns: The async connection pool. """ - if not self.pool_instance: - self.pool_instance = await self.create_pool() - return self.pool_instance + if not self.connection_instance: + self.connection_instance = await self.create_pool() + return self.connection_instance def get_signature_namespace(self) -> "dict[str, Any]": """Get the signature namespace for OracleDB async types. diff --git a/sqlspec/adapters/oracledb/litestar/store.py b/sqlspec/adapters/oracledb/litestar/store.py index 9d5f039d8..1305e195f 100644 --- a/sqlspec/adapters/oracledb/litestar/store.py +++ b/sqlspec/adapters/oracledb/litestar/store.py @@ -36,7 +36,7 @@ class OracleAsyncStore(BaseSQLSpecStore["OracleAsyncConfig"]): from sqlspec.adapters.oracledb.litestar.store import OracleAsyncStore config = OracleAsyncConfig( - pool_config={"dsn": "oracle://..."}, + connection_config={"dsn": "oracle://..."}, extension_config={ "litestar": { "session_table": "my_sessions", @@ -404,7 +404,7 @@ class OracleSyncStore(BaseSQLSpecStore["OracleSyncConfig"]): from sqlspec.adapters.oracledb.litestar.store import OracleSyncStore config = OracleSyncConfig( - pool_config={"dsn": "oracle://..."}, + connection_config={"dsn": "oracle://..."}, extension_config={ "litestar": { "session_table": "my_sessions", diff --git a/sqlspec/adapters/psqlpy/adk/store.py b/sqlspec/adapters/psqlpy/adk/store.py index b3a00c9ed..4384fb9b7 100644 --- a/sqlspec/adapters/psqlpy/adk/store.py +++ b/sqlspec/adapters/psqlpy/adk/store.py @@ -42,7 +42,7 @@ class PsqlpyADKStore(BaseAsyncADKStore["PsqlpyConfig"]): from sqlspec.adapters.psqlpy.adk import PsqlpyADKStore config = PsqlpyConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, extension_config={ "adk": { "session_table": "my_sessions", diff --git a/sqlspec/adapters/psqlpy/config.py b/sqlspec/adapters/psqlpy/config.py index 790534b58..be7217af7 100644 --- a/sqlspec/adapters/psqlpy/config.py +++ b/sqlspec/adapters/psqlpy/config.py @@ -113,8 +113,8 @@ class PsqlpyConfig(AsyncDatabaseConfig[PsqlpyConnection, ConnectionPool, PsqlpyD def __init__( self, *, - pool_config: PsqlpyPoolParams | dict[str, Any] | None = None, - pool_instance: ConnectionPool | None = None, + connection_config: PsqlpyPoolParams | dict[str, Any] | None = None, + connection_instance: ConnectionPool | None = None, migration_config: dict[str, Any] | None = None, statement_config: StatementConfig | None = None, driver_features: 
"PsqlpyDriverFeatures | dict[str, Any] | None" = None, @@ -124,18 +124,18 @@ def __init__( """Initialize Psqlpy configuration. Args: - pool_config: Pool configuration parameters. - pool_instance: Existing connection pool instance to use. + connection_config: Connection and pool configuration parameters. + connection_instance: Existing connection pool instance to use. migration_config: Migration configuration. statement_config: SQL statement configuration. driver_features: Driver feature configuration (TypedDict or dict). bind_key: Optional unique identifier for this configuration. extension_config: Extension-specific configuration (e.g., Litestar plugin settings). """ - processed_pool_config: dict[str, Any] = dict(pool_config) if pool_config else {} - if "extra" in processed_pool_config: - extras = processed_pool_config.pop("extra") - processed_pool_config.update(extras) + processed_connection_config: dict[str, Any] = dict(connection_config) if connection_config else {} + if "extra" in processed_connection_config: + extras = processed_connection_config.pop("extra") + processed_connection_config.update(extras) processed_driver_features: dict[str, Any] = dict(driver_features) if driver_features else {} serializer = processed_driver_features.get("json_serializer") @@ -144,8 +144,8 @@ def __init__( processed_driver_features.setdefault("enable_pgvector", PGVECTOR_INSTALLED) super().__init__( - pool_config=processed_pool_config, - pool_instance=pool_instance, + connection_config=processed_connection_config, + connection_instance=connection_instance, migration_config=migration_config, statement_config=statement_config or build_psqlpy_statement_config(json_serializer=serializer_callable), driver_features=processed_driver_features, @@ -159,7 +159,7 @@ def _get_pool_config_dict(self) -> dict[str, Any]: Returns: Dictionary with pool parameters, filtering out None values. """ - return {k: v for k, v in self.pool_config.items() if v is not None} + return {k: v for k, v in self.connection_config.items() if v is not None} async def _create_pool(self) -> "ConnectionPool": """Create the actual async connection pool.""" @@ -177,13 +177,13 @@ async def _create_pool(self) -> "ConnectionPool": async def _close_pool(self) -> None: """Close the actual async connection pool.""" - if not self.pool_instance: + if not self.connection_instance: return logger.info("Closing psqlpy connection pool", extra={"adapter": "psqlpy"}) try: - self.pool_instance.close() + self.connection_instance.close() logger.info("Psqlpy connection pool closed successfully", extra={"adapter": "psqlpy"}) except Exception as e: logger.exception("Failed to close psqlpy connection pool", extra={"adapter": "psqlpy", "error": str(e)}) @@ -199,10 +199,10 @@ async def create_connection(self) -> "PsqlpyConnection": Returns: A psqlpy Connection instance. """ - if not self.pool_instance: - self.pool_instance = await self._create_pool() + if not self.connection_instance: + self.connection_instance = await self._create_pool() - return await self.pool_instance.connection() + return await self.connection_instance.connection() @asynccontextmanager async def provide_connection(self, *args: Any, **kwargs: Any) -> AsyncGenerator[PsqlpyConnection, None]: @@ -215,10 +215,10 @@ async def provide_connection(self, *args: Any, **kwargs: Any) -> AsyncGenerator[ Yields: A psqlpy Connection instance. 
""" - if not self.pool_instance: - self.pool_instance = await self._create_pool() + if not self.connection_instance: + self.connection_instance = await self._create_pool() - async with self.pool_instance.acquire() as conn: + async with self.connection_instance.acquire() as conn: yield conn @asynccontextmanager @@ -249,9 +249,9 @@ async def provide_pool(self, *args: Any, **kwargs: Any) -> ConnectionPool: Returns: The async connection pool. """ - if not self.pool_instance: - self.pool_instance = await self.create_pool() - return self.pool_instance + if not self.connection_instance: + self.connection_instance = await self.create_pool() + return self.connection_instance def get_signature_namespace(self) -> "dict[str, Any]": """Get the signature namespace for Psqlpy types. diff --git a/sqlspec/adapters/psqlpy/litestar/store.py b/sqlspec/adapters/psqlpy/litestar/store.py index 804daa28e..308dfd7e8 100644 --- a/sqlspec/adapters/psqlpy/litestar/store.py +++ b/sqlspec/adapters/psqlpy/litestar/store.py @@ -32,7 +32,7 @@ class PsqlpyStore(BaseSQLSpecStore["PsqlpyConfig"]): from sqlspec.adapters.psqlpy import PsqlpyConfig from sqlspec.adapters.psqlpy.litestar.store import PsqlpyStore - config = PsqlpyConfig(pool_config={"dsn": "postgresql://..."}) + config = PsqlpyConfig(connection_config={"dsn": "postgresql://..."}) store = PsqlpyStore(config) await store.create_table() """ diff --git a/sqlspec/adapters/psycopg/adk/store.py b/sqlspec/adapters/psycopg/adk/store.py index 8395cda11..6ecad5631 100644 --- a/sqlspec/adapters/psycopg/adk/store.py +++ b/sqlspec/adapters/psycopg/adk/store.py @@ -42,7 +42,7 @@ class PsycopgAsyncADKStore(BaseAsyncADKStore["PsycopgAsyncConfig"]): from sqlspec.adapters.psycopg.adk import PsycopgAsyncADKStore config = PsycopgAsyncConfig( - pool_config={"conninfo": "postgresql://..."}, + connection_config={"conninfo": "postgresql://..."}, extension_config={ "adk": { "session_table": "my_sessions", @@ -497,7 +497,7 @@ class PsycopgSyncADKStore(BaseSyncADKStore["PsycopgSyncConfig"]): from sqlspec.adapters.psycopg.adk import PsycopgSyncADKStore config = PsycopgSyncConfig( - pool_config={"conninfo": "postgresql://..."}, + connection_config={"conninfo": "postgresql://..."}, extension_config={ "adk": { "session_table": "my_sessions", diff --git a/sqlspec/adapters/psycopg/config.py b/sqlspec/adapters/psycopg/config.py index e363bdd34..7924ae417 100644 --- a/sqlspec/adapters/psycopg/config.py +++ b/sqlspec/adapters/psycopg/config.py @@ -113,8 +113,8 @@ class PsycopgSyncConfig(SyncDatabaseConfig[PsycopgSyncConnection, ConnectionPool def __init__( self, *, - pool_config: "PsycopgPoolParams | dict[str, Any] | None" = None, - pool_instance: "ConnectionPool | None" = None, + connection_config: "PsycopgPoolParams | dict[str, Any] | None" = None, + connection_instance: "ConnectionPool | None" = None, migration_config: dict[str, Any] | None = None, statement_config: "StatementConfig | None" = None, driver_features: "dict[str, Any] | None" = None, @@ -124,18 +124,18 @@ def __init__( """Initialize Psycopg synchronous configuration. 
Args: - pool_config: Pool configuration parameters (TypedDict or dict) - pool_instance: Existing pool instance to use + connection_config: Connection and pool configuration parameters (TypedDict or dict) + connection_instance: Existing pool instance to use migration_config: Migration configuration statement_config: Default SQL statement configuration driver_features: Optional driver feature configuration bind_key: Optional unique identifier for this configuration extension_config: Extension-specific configuration (e.g., Litestar plugin settings) """ - processed_pool_config: dict[str, Any] = dict(pool_config) if pool_config else {} - if "extra" in processed_pool_config: - extras = processed_pool_config.pop("extra") - processed_pool_config.update(extras) + processed_connection_config: dict[str, Any] = dict(connection_config) if connection_config else {} + if "extra" in processed_connection_config: + extras = processed_connection_config.pop("extra") + processed_connection_config.update(extras) processed_driver_features: dict[str, Any] = dict(driver_features) if driver_features else {} serializer = cast("Callable[[Any], str]", processed_driver_features.get("json_serializer", to_json)) @@ -143,8 +143,8 @@ def __init__( processed_driver_features.setdefault("enable_pgvector", PGVECTOR_INSTALLED) super().__init__( - pool_config=processed_pool_config, - pool_instance=pool_instance, + connection_config=processed_connection_config, + connection_instance=connection_instance, migration_config=migration_config, statement_config=statement_config or build_psycopg_statement_config(json_serializer=serializer), driver_features=processed_driver_features, @@ -157,7 +157,7 @@ def _create_pool(self) -> "ConnectionPool": logger.info("Creating Psycopg connection pool", extra={"adapter": "psycopg"}) try: - all_config = dict(self.pool_config) + all_config = dict(self.connection_config) pool_parameters = { "min_size": all_config.pop("min_size", 4), @@ -201,19 +201,19 @@ def configure_connection(conn: "PsycopgSyncConnection") -> None: def _close_pool(self) -> None: """Close the actual connection pool.""" - if not self.pool_instance: + if not self.connection_instance: return logger.info("Closing Psycopg connection pool", extra={"adapter": "psycopg"}) try: - self.pool_instance.close() + self.connection_instance.close() logger.info("Psycopg connection pool closed successfully", extra={"adapter": "psycopg"}) except Exception as e: logger.exception("Failed to close Psycopg connection pool", extra={"adapter": "psycopg", "error": str(e)}) raise finally: - self.pool_instance = None + self.connection_instance = None def create_connection(self) -> "PsycopgSyncConnection": """Create a single connection (not from pool). @@ -221,9 +221,9 @@ def create_connection(self) -> "PsycopgSyncConnection": Returns: A psycopg Connection instance configured with DictRow. """ - if self.pool_instance is None: - self.pool_instance = self.create_pool() - return cast("PsycopgSyncConnection", self.pool_instance.getconn()) # pyright: ignore + if self.connection_instance is None: + self.connection_instance = self.create_pool() + return cast("PsycopgSyncConnection", self.connection_instance.getconn()) # pyright: ignore @contextlib.contextmanager def provide_connection(self, *args: Any, **kwargs: Any) -> "Generator[PsycopgSyncConnection, None, None]": @@ -236,8 +236,8 @@ def provide_connection(self, *args: Any, **kwargs: Any) -> "Generator[PsycopgSyn Yields: A psycopg Connection instance. 
""" - if self.pool_instance: - with self.pool_instance.connection() as conn: + if self.connection_instance: + with self.connection_instance.connection() as conn: yield conn # type: ignore[misc] else: conn = self.create_connection() # type: ignore[assignment] @@ -273,9 +273,9 @@ def provide_pool(self, *args: Any, **kwargs: Any) -> "ConnectionPool": Returns: The connection pool. """ - if not self.pool_instance: - self.pool_instance = self.create_pool() - return self.pool_instance + if not self.connection_instance: + self.connection_instance = self.create_pool() + return self.connection_instance def get_signature_namespace(self) -> "dict[str, Any]": """Get the signature namespace for Psycopg types. @@ -312,8 +312,8 @@ class PsycopgAsyncConfig(AsyncDatabaseConfig[PsycopgAsyncConnection, AsyncConnec def __init__( self, *, - pool_config: "PsycopgPoolParams | dict[str, Any] | None" = None, - pool_instance: "AsyncConnectionPool | None" = None, + connection_config: "PsycopgPoolParams | dict[str, Any] | None" = None, + connection_instance: "AsyncConnectionPool | None" = None, migration_config: "dict[str, Any] | None" = None, statement_config: "StatementConfig | None" = None, driver_features: "dict[str, Any] | None" = None, @@ -323,18 +323,18 @@ def __init__( """Initialize Psycopg asynchronous configuration. Args: - pool_config: Pool configuration parameters (TypedDict or dict) - pool_instance: Existing pool instance to use + connection_config: Connection and pool configuration parameters (TypedDict or dict) + connection_instance: Existing pool instance to use migration_config: Migration configuration statement_config: Default SQL statement configuration driver_features: Optional driver feature configuration bind_key: Optional unique identifier for this configuration extension_config: Extension-specific configuration (e.g., Litestar plugin settings) """ - processed_pool_config: dict[str, Any] = dict(pool_config) if pool_config else {} - if "extra" in processed_pool_config: - extras = processed_pool_config.pop("extra") - processed_pool_config.update(extras) + processed_connection_config: dict[str, Any] = dict(connection_config) if connection_config else {} + if "extra" in processed_connection_config: + extras = processed_connection_config.pop("extra") + processed_connection_config.update(extras) processed_driver_features: dict[str, Any] = dict(driver_features) if driver_features else {} serializer = cast("Callable[[Any], str]", processed_driver_features.get("json_serializer", to_json)) @@ -342,8 +342,8 @@ def __init__( processed_driver_features.setdefault("enable_pgvector", PGVECTOR_INSTALLED) super().__init__( - pool_config=processed_pool_config, - pool_instance=pool_instance, + connection_config=processed_connection_config, + connection_instance=connection_instance, migration_config=migration_config, statement_config=statement_config or build_psycopg_statement_config(json_serializer=serializer), driver_features=processed_driver_features, @@ -354,7 +354,7 @@ def __init__( async def _create_pool(self) -> "AsyncConnectionPool": """Create the actual async connection pool.""" - all_config = dict(self.pool_config) + all_config = dict(self.connection_config) pool_parameters = { "min_size": all_config.pop("min_size", 4), @@ -396,13 +396,13 @@ async def configure_connection(conn: "PsycopgAsyncConnection") -> None: async def _close_pool(self) -> None: """Close the actual async connection pool.""" - if not self.pool_instance: + if not self.connection_instance: return try: - await self.pool_instance.close() + 
await self.connection_instance.close() finally: - self.pool_instance = None + self.connection_instance = None async def create_connection(self) -> "PsycopgAsyncConnection": # pyright: ignore """Create a single async connection (not from pool). @@ -410,9 +410,9 @@ async def create_connection(self) -> "PsycopgAsyncConnection": # pyright: ignor Returns: A psycopg AsyncConnection instance configured with DictRow. """ - if self.pool_instance is None: - self.pool_instance = await self.create_pool() - return cast("PsycopgAsyncConnection", await self.pool_instance.getconn()) # pyright: ignore + if self.connection_instance is None: + self.connection_instance = await self.create_pool() + return cast("PsycopgAsyncConnection", await self.connection_instance.getconn()) # pyright: ignore @asynccontextmanager async def provide_connection(self, *args: Any, **kwargs: Any) -> "AsyncGenerator[PsycopgAsyncConnection, None]": # pyright: ignore @@ -425,8 +425,8 @@ async def provide_connection(self, *args: Any, **kwargs: Any) -> "AsyncGenerator Yields: A psycopg AsyncConnection instance. """ - if self.pool_instance: - async with self.pool_instance.connection() as conn: + if self.connection_instance: + async with self.connection_instance.connection() as conn: yield conn # type: ignore[misc] else: conn = await self.create_connection() # type: ignore[assignment] @@ -462,9 +462,9 @@ async def provide_pool(self, *args: Any, **kwargs: Any) -> "AsyncConnectionPool" Returns: The async connection pool. """ - if not self.pool_instance: - self.pool_instance = await self.create_pool() - return self.pool_instance + if not self.connection_instance: + self.connection_instance = await self.create_pool() + return self.connection_instance def get_signature_namespace(self) -> "dict[str, Any]": """Get the signature namespace for Psycopg async types. 
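For the async psycopg config, `_create_pool()` above pops `min_size`/`max_size` (defaulting `min_size` to 4) into pool parameters and forwards the remaining keys as connection kwargs. A hedged sketch of the renamed call site — the conninfo value is a placeholder; the import path matches the store docstrings below:

```python
import asyncio

from sqlspec.adapters.psycopg import PsycopgAsyncConfig


async def main() -> None:
    config = PsycopgAsyncConfig(
        connection_config={
            "conninfo": "postgresql://localhost/mydb",  # placeholder
            "min_size": 4,   # consumed by the pool, not the connection
            "max_size": 16,
        }
    )
    # The pool is created lazily on first acquisition.
    async with config.provide_connection() as conn:
        async with conn.cursor() as cur:
            await cur.execute("SELECT 1")
    await config.close_pool()


asyncio.run(main())
```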
diff --git a/sqlspec/adapters/psycopg/litestar/store.py b/sqlspec/adapters/psycopg/litestar/store.py index 4a0237e09..9b16d4a39 100644 --- a/sqlspec/adapters/psycopg/litestar/store.py +++ b/sqlspec/adapters/psycopg/litestar/store.py @@ -36,7 +36,7 @@ class PsycopgAsyncStore(BaseSQLSpecStore["PsycopgAsyncConfig"]): from sqlspec.adapters.psycopg import PsycopgAsyncConfig from sqlspec.adapters.psycopg.litestar.store import PsycopgAsyncStore - config = PsycopgAsyncConfig(pool_config={"conninfo": "postgresql://..."}) + config = PsycopgAsyncConfig(connection_config={"conninfo": "postgresql://..."}) store = PsycopgAsyncStore(config) await store.create_table() """ @@ -298,7 +298,7 @@ class PsycopgSyncStore(BaseSQLSpecStore["PsycopgSyncConfig"]): from sqlspec.adapters.psycopg import PsycopgSyncConfig from sqlspec.adapters.psycopg.litestar.store import PsycopgSyncStore - config = PsycopgSyncConfig(pool_config={"conninfo": "postgresql://..."}) + config = PsycopgSyncConfig(connection_config={"conninfo": "postgresql://..."}) store = PsycopgSyncStore(config) await store.create_table() """ diff --git a/sqlspec/adapters/spanner/config.py b/sqlspec/adapters/spanner/config.py index 0917842e0..d0192b0be 100644 --- a/sqlspec/adapters/spanner/config.py +++ b/sqlspec/adapters/spanner/config.py @@ -70,8 +70,8 @@ class SpannerSyncConfig(SyncDatabaseConfig["SpannerConnection", "AbstractSession def __init__( self, *, - pool_config: "SpannerPoolParams | dict[str, Any] | None" = None, - pool_instance: "AbstractSessionPool | None" = None, + connection_config: "SpannerPoolParams | dict[str, Any] | None" = None, + connection_instance: "AbstractSessionPool | None" = None, migration_config: "dict[str, Any] | None" = None, statement_config: "StatementConfig | None" = None, driver_features: "SpannerDriverFeatures | dict[str, Any] | None" = None, @@ -79,11 +79,11 @@ def __init__( extension_config: "ExtensionConfigs | None" = None, observability_config: "ObservabilityConfig | None" = None, ) -> None: - self.pool_config = dict(pool_config) if pool_config else {} + self.connection_config = dict(connection_config) if connection_config else {} - self.pool_config.setdefault("min_sessions", 1) - self.pool_config.setdefault("max_sessions", 10) - self.pool_config.setdefault("pool_type", FixedSizePool) + self.connection_config.setdefault("min_sessions", 1) + self.connection_config.setdefault("max_sessions", 10) + self.connection_config.setdefault("pool_type", FixedSizePool) features: dict[str, Any] = dict(driver_features) if driver_features else {} features.setdefault("enable_uuid_conversion", True) @@ -93,8 +93,8 @@ def __init__( base_statement_config = statement_config or spanner_statement_config super().__init__( - pool_config=self.pool_config, - pool_instance=pool_instance, + connection_config=self.connection_config, + connection_instance=connection_instance, migration_config=migration_config, statement_config=base_statement_config, driver_features=features, @@ -109,70 +109,70 @@ def __init__( def _get_client(self) -> Client: if self._client is None: self._client = Client( - project=self.pool_config.get("project"), - credentials=self.pool_config.get("credentials"), - client_options=self.pool_config.get("client_options"), + project=self.connection_config.get("project"), + credentials=self.connection_config.get("credentials"), + client_options=self.connection_config.get("client_options"), ) return self._client def get_database(self) -> "Database": - instance_id = self.pool_config.get("instance_id") - database_id = 
self.pool_config.get("database_id") + instance_id = self.connection_config.get("instance_id") + database_id = self.connection_config.get("database_id") if not instance_id or not database_id: msg = "instance_id and database_id are required." raise ImproperConfigurationError(msg) - if self.pool_instance is None: - self.pool_instance = self.provide_pool() + if self.connection_instance is None: + self.connection_instance = self.provide_pool() if self._database is None: client = self._get_client() - self._database = client.instance(instance_id).database(database_id, pool=self.pool_instance) # type: ignore[no-untyped-call] + self._database = client.instance(instance_id).database(database_id, pool=self.connection_instance) # type: ignore[no-untyped-call] return self._database def create_connection(self) -> SpannerConnection: - instance_id = self.pool_config.get("instance_id") - database_id = self.pool_config.get("database_id") + instance_id = self.connection_config.get("instance_id") + database_id = self.connection_config.get("database_id") if not instance_id or not database_id: msg = "instance_id and database_id are required." raise ImproperConfigurationError(msg) - if self.pool_instance is None: - self.pool_instance = self.provide_pool() + if self.connection_instance is None: + self.connection_instance = self.provide_pool() client = self._get_client() - database = client.instance(instance_id).database(database_id, pool=self.pool_instance) # type: ignore[no-untyped-call] + database = client.instance(instance_id).database(database_id, pool=self.connection_instance) # type: ignore[no-untyped-call] return cast("SpannerConnection", database.snapshot()) def _create_pool(self) -> AbstractSessionPool: - instance_id = self.pool_config.get("instance_id") - database_id = self.pool_config.get("database_id") + instance_id = self.connection_config.get("instance_id") + database_id = self.connection_config.get("database_id") if not instance_id or not database_id: msg = "instance_id and database_id are required." 
raise ImproperConfigurationError(msg) - pool_type = cast("type[AbstractSessionPool]", self.pool_config.get("pool_type", FixedSizePool)) + pool_type = cast("type[AbstractSessionPool]", self.connection_config.get("pool_type", FixedSizePool)) pool_kwargs: dict[str, Any] = {} if pool_type is FixedSizePool: - if "size" in self.pool_config: - pool_kwargs["size"] = self.pool_config["size"] - elif "max_sessions" in self.pool_config: - pool_kwargs["size"] = self.pool_config["max_sessions"] - if "labels" in self.pool_config: - pool_kwargs["labels"] = self.pool_config["labels"] + if "size" in self.connection_config: + pool_kwargs["size"] = self.connection_config["size"] + elif "max_sessions" in self.connection_config: + pool_kwargs["size"] = self.connection_config["max_sessions"] + if "labels" in self.connection_config: + pool_kwargs["labels"] = self.connection_config["labels"] else: valid_pool_keys = {"size", "labels", "ping_interval"} - pool_kwargs = {k: v for k, v in self.pool_config.items() if k in valid_pool_keys and v is not None} - if "size" not in pool_kwargs and "max_sessions" in self.pool_config: - pool_kwargs["size"] = self.pool_config["max_sessions"] + pool_kwargs = {k: v for k, v in self.connection_config.items() if k in valid_pool_keys and v is not None} + if "size" not in pool_kwargs and "max_sessions" in self.connection_config: + pool_kwargs["size"] = self.connection_config["max_sessions"] pool_factory = cast("Callable[..., AbstractSessionPool]", pool_type) return pool_factory(**pool_kwargs) def _close_pool(self) -> None: - if self.pool_instance and hasattr(self.pool_instance, "close"): - cast("Any", self.pool_instance).close() + if self.connection_instance and hasattr(self.connection_instance, "close"): + cast("Any", self.connection_instance).close() @contextmanager def provide_connection( diff --git a/sqlspec/adapters/sqlite/config.py b/sqlspec/adapters/sqlite/config.py index f48d3969f..d07fbe7fa 100644 --- a/sqlspec/adapters/sqlite/config.py +++ b/sqlspec/adapters/sqlite/config.py @@ -72,8 +72,8 @@ class SqliteConfig(SyncDatabaseConfig[SqliteConnection, SqliteConnectionPool, Sq def __init__( self, *, - pool_config: "SqliteConnectionParams | dict[str, Any] | None" = None, - pool_instance: "SqliteConnectionPool | None" = None, + connection_config: "SqliteConnectionParams | dict[str, Any] | None" = None, + connection_instance: "SqliteConnectionPool | None" = None, migration_config: "dict[str, Any] | None" = None, statement_config: "StatementConfig | None" = None, driver_features: "SqliteDriverFeatures | dict[str, Any] | None" = None, @@ -84,8 +84,8 @@ def __init__( """Initialize SQLite configuration. 
Args: - pool_config: Configuration parameters including connection settings - pool_instance: Pre-created pool instance + connection_config: Configuration parameters including connection settings + connection_instance: Pre-created pool instance migration_config: Migration configuration statement_config: Default SQL statement configuration driver_features: Optional driver feature configuration @@ -93,20 +93,20 @@ def __init__( extension_config: Extension-specific configuration (e.g., Litestar plugin settings) observability_config: Adapter-level observability overrides for lifecycle hooks and observers """ - if pool_config is None: - pool_config = {} - if "database" not in pool_config or pool_config["database"] == ":memory:": - pool_config["database"] = f"file:memory_{uuid.uuid4().hex}?mode=memory&cache=private" - pool_config["uri"] = True - elif "database" in pool_config: - database_path = str(pool_config["database"]) - if database_path.startswith("file:") and not pool_config.get("uri"): + if connection_config is None: + connection_config = {} + if "database" not in connection_config or connection_config["database"] == ":memory:": + connection_config["database"] = f"file:memory_{uuid.uuid4().hex}?mode=memory&cache=private" + connection_config["uri"] = True + elif "database" in connection_config: + database_path = str(connection_config["database"]) + if database_path.startswith("file:") and not connection_config.get("uri"): logger.debug( "Database URI detected (%s) but uri=True not set. " "Auto-enabling URI mode to prevent physical file creation.", database_path, ) - pool_config["uri"] = True + connection_config["uri"] = True processed_driver_features: dict[str, Any] = dict(driver_features) if driver_features else {} processed_driver_features.setdefault("enable_custom_adapters", True) @@ -122,8 +122,8 @@ def __init__( super().__init__( bind_key=bind_key, - pool_instance=pool_instance, - pool_config=cast("dict[str, Any]", pool_config), + connection_instance=connection_instance, + connection_config=cast("dict[str, Any]", connection_config), migration_config=migration_config, statement_config=base_statement_config, driver_features=processed_driver_features, @@ -135,13 +135,13 @@ def _get_connection_config_dict(self) -> "dict[str, Any]": """Get connection configuration as plain dict for pool creation.""" excluded_keys = {"pool_min_size", "pool_max_size", "pool_timeout", "pool_recycle_seconds", "extra"} - return {k: v for k, v in self.pool_config.items() if v is not None and k not in excluded_keys} + return {k: v for k, v in self.connection_config.items() if v is not None and k not in excluded_keys} def _create_pool(self) -> SqliteConnectionPool: """Create connection pool from configuration.""" config_dict = self._get_connection_config_dict() - pool = SqliteConnectionPool(connection_parameters=config_dict, **self.pool_config) + pool = SqliteConnectionPool(connection_parameters=config_dict, **self.connection_config) if self.driver_features.get("enable_custom_adapters", False): self._register_type_adapters() @@ -162,8 +162,8 @@ def _register_type_adapters(self) -> None: def _close_pool(self) -> None: """Close the connection pool.""" - if self.pool_instance: - self.pool_instance.close() + if self.connection_instance: + self.connection_instance.close() def create_connection(self) -> SqliteConnection: """Get a SQLite connection from the pool. 
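The SQLite hunks above also carry two defaulting behaviors worth calling out: a missing or `:memory:` database is rewritten to a uniquely named private in-memory URI, and a `file:` path without `uri=True` has URI mode auto-enabled. A small sketch of what that implies for callers — the import path is inferred from the patched file, and the assertions restate the `__init__` logic rather than separately verified behavior:

```python
from sqlspec.adapters.sqlite import SqliteConfig

# ":memory:" becomes "file:memory_<hex>?mode=memory&cache=private" with
# uri=True, so each config gets its own private in-memory database.
mem = SqliteConfig(connection_config={"database": ":memory:"})
assert mem.connection_config["database"].startswith("file:memory_")
assert mem.connection_config["uri"] is True

# "file:" paths auto-enable URI mode (with a debug log) rather than
# creating a physical file literally named "file:app.db".
uri = SqliteConfig(connection_config={"database": "file:app.db"})
assert uri.connection_config["uri"] is True
```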
diff --git a/sqlspec/config.py b/sqlspec/config.py index f39b12e76..ae50b7a00 100644 --- a/sqlspec/config.py +++ b/sqlspec/config.py @@ -126,7 +126,7 @@ class FlaskConfig(TypedDict): from sqlspec.adapters.asyncpg import AsyncpgConfig config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={ "flask": { "commit_mode": "autocommit", @@ -214,7 +214,7 @@ class StarletteConfig(TypedDict): from sqlspec.adapters.asyncpg import AsyncpgConfig config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={ "starlette": { "commit_mode": "autocommit", @@ -276,7 +276,7 @@ class FastAPIConfig(StarletteConfig): from sqlspec.adapters.asyncpg import AsyncpgConfig config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={ "fastapi": { "commit_mode": "autocommit", @@ -295,7 +295,7 @@ class ADKConfig(TypedDict): from sqlspec.adapters.asyncpg import AsyncpgConfig config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={ "adk": { "session_table": "my_sessions", @@ -388,7 +388,7 @@ class ADKConfig(TypedDict): Examples: Oracle with in-memory enabled: config = OracleAsyncConfig( - pool_config={"dsn": "oracle://..."}, + connection_config={"dsn": "oracle://..."}, extension_config={ "adk": { "in_memory": True @@ -495,11 +495,11 @@ class DatabaseConfigProtocol(ABC, Generic[ConnectionT, PoolT, DriverT]): "_observability_runtime", "_storage_capabilities", "bind_key", + "connection_instance", "driver_features", "extension_config", "migration_config", "observability_config", - "pool_instance", "statement_config", ) @@ -520,7 +520,7 @@ class DatabaseConfigProtocol(ABC, Generic[ConnectionT, PoolT, DriverT]): storage_partition_strategies: "ClassVar[tuple[str, ...]]" = ("fixed",) bind_key: "str | None" statement_config: "StatementConfig" - pool_instance: "PoolT | None" + connection_instance: "PoolT | None" migration_config: "dict[str, Any] | MigrationConfig" extension_config: "ExtensionConfigs" driver_features: "dict[str, Any]" @@ -534,10 +534,15 @@ def __hash__(self) -> int: def __eq__(self, other: object) -> bool: if not isinstance(other, type(self)): return False - return bool(self.pool_instance == other.pool_instance and self.migration_config == other.migration_config) + return bool( + self.connection_instance == other.connection_instance and self.migration_config == other.migration_config + ) def __repr__(self) -> str: - parts = ", ".join([f"pool_instance={self.pool_instance!r}", f"migration_config={self.migration_config!r}"]) + parts = ", ".join([ + f"connection_instance={self.connection_instance!r}", + f"migration_config={self.migration_config!r}", + ]) return f"{type(self).__name__}({parts})" def storage_capabilities(self) -> "StorageCapabilities": @@ -916,6 +921,7 @@ def __init__( self, *, connection_config: dict[str, Any] | None = None, + connection_instance: "Any" = None, migration_config: "dict[str, Any] | MigrationConfig | None" = None, statement_config: "StatementConfig | None" = None, driver_features: "dict[str, Any] | None" = None, @@ -924,7 +930,7 @@ def __init__( observability_config: "ObservabilityConfig | None" = None, ) -> None: self.bind_key = bind_key - self.pool_instance = None + self.connection_instance = 
connection_instance self.connection_config = connection_config or {} self.extension_config = extension_config or {} self.migration_config: dict[str, Any] | MigrationConfig = migration_config or {} @@ -1061,6 +1067,7 @@ def __init__( self, *, connection_config: "dict[str, Any] | None" = None, + connection_instance: "Any" = None, migration_config: "dict[str, Any] | MigrationConfig | None" = None, statement_config: "StatementConfig | None" = None, driver_features: "dict[str, Any] | None" = None, @@ -1069,7 +1076,7 @@ def __init__( observability_config: "ObservabilityConfig | None" = None, ) -> None: self.bind_key = bind_key - self.pool_instance = None + self.connection_instance = connection_instance self.connection_config = connection_config or {} self.extension_config = extension_config or {} self.migration_config: dict[str, Any] | MigrationConfig = migration_config or {} @@ -1195,7 +1202,7 @@ async def fix_migrations(self, dry_run: bool = False, update_database: bool = Tr class SyncDatabaseConfig(DatabaseConfigProtocol[ConnectionT, PoolT, DriverT]): """Base class for sync database configurations with connection pooling.""" - __slots__ = ("pool_config",) + __slots__ = ("connection_config",) is_async: "ClassVar[bool]" = False supports_connection_pooling: "ClassVar[bool]" = True migration_tracker_type: "ClassVar[type[Any]]" = SyncMigrationTracker @@ -1203,8 +1210,8 @@ class SyncDatabaseConfig(DatabaseConfigProtocol[ConnectionT, PoolT, DriverT]): def __init__( self, *, - pool_config: "dict[str, Any] | None" = None, - pool_instance: "PoolT | None" = None, + connection_config: "dict[str, Any] | None" = None, + connection_instance: "PoolT | None" = None, migration_config: "dict[str, Any] | MigrationConfig | None" = None, statement_config: "StatementConfig | None" = None, driver_features: "dict[str, Any] | None" = None, @@ -1213,8 +1220,8 @@ def __init__( observability_config: "ObservabilityConfig | None" = None, ) -> None: self.bind_key = bind_key - self.pool_instance = pool_instance - self.pool_config = pool_config or {} + self.connection_instance = connection_instance + self.connection_config = connection_config or {} self.extension_config = extension_config or {} self.migration_config: dict[str, Any] | MigrationConfig = migration_config or {} self._init_observability(observability_config) @@ -1239,25 +1246,25 @@ def create_pool(self) -> PoolT: Returns: The created pool. 
""" - if self.pool_instance is not None: - return self.pool_instance - self.pool_instance = self._create_pool() - self.get_observability_runtime().emit_pool_create(self.pool_instance) - return self.pool_instance + if self.connection_instance is not None: + return self.connection_instance + self.connection_instance = self._create_pool() + self.get_observability_runtime().emit_pool_create(self.connection_instance) + return self.connection_instance def close_pool(self) -> None: """Close the connection pool.""" - pool = self.pool_instance + pool = self.connection_instance self._close_pool() if pool is not None: self.get_observability_runtime().emit_pool_destroy(pool) - self.pool_instance = None + self.connection_instance = None def provide_pool(self, *args: Any, **kwargs: Any) -> PoolT: """Provide pool instance.""" - if self.pool_instance is None: - self.pool_instance = self.create_pool() - return self.pool_instance + if self.connection_instance is None: + self.connection_instance = self.create_pool() + return self.connection_instance def create_connection(self) -> ConnectionT: """Create a database connection.""" @@ -1368,7 +1375,7 @@ def fix_migrations(self, dry_run: bool = False, update_database: bool = True, ye class AsyncDatabaseConfig(DatabaseConfigProtocol[ConnectionT, PoolT, DriverT]): """Base class for async database configurations with connection pooling.""" - __slots__ = ("pool_config",) + __slots__ = ("connection_config",) is_async: "ClassVar[bool]" = True supports_connection_pooling: "ClassVar[bool]" = True migration_tracker_type: "ClassVar[type[Any]]" = AsyncMigrationTracker @@ -1376,8 +1383,8 @@ class AsyncDatabaseConfig(DatabaseConfigProtocol[ConnectionT, PoolT, DriverT]): def __init__( self, *, - pool_config: "dict[str, Any] | None" = None, - pool_instance: "PoolT | None" = None, + connection_config: "dict[str, Any] | None" = None, + connection_instance: "PoolT | None" = None, migration_config: "dict[str, Any] | MigrationConfig | None" = None, statement_config: "StatementConfig | None" = None, driver_features: "dict[str, Any] | None" = None, @@ -1386,8 +1393,8 @@ def __init__( observability_config: "ObservabilityConfig | None" = None, ) -> None: self.bind_key = bind_key - self.pool_instance = pool_instance - self.pool_config = pool_config or {} + self.connection_instance = connection_instance + self.connection_config = connection_config or {} self.extension_config = extension_config or {} self.migration_config: dict[str, Any] | MigrationConfig = migration_config or {} self._init_observability(observability_config) @@ -1414,25 +1421,25 @@ async def create_pool(self) -> PoolT: Returns: The created pool. 
""" - if self.pool_instance is not None: - return self.pool_instance - self.pool_instance = await self._create_pool() - self.get_observability_runtime().emit_pool_create(self.pool_instance) - return self.pool_instance + if self.connection_instance is not None: + return self.connection_instance + self.connection_instance = await self._create_pool() + self.get_observability_runtime().emit_pool_create(self.connection_instance) + return self.connection_instance async def close_pool(self) -> None: """Close the connection pool.""" - pool = self.pool_instance + pool = self.connection_instance await self._close_pool() if pool is not None: self.get_observability_runtime().emit_pool_destroy(pool) - self.pool_instance = None + self.connection_instance = None async def provide_pool(self, *args: Any, **kwargs: Any) -> PoolT: """Provide pool instance.""" - if self.pool_instance is None: - self.pool_instance = await self.create_pool() - return self.pool_instance + if self.connection_instance is None: + self.connection_instance = await self.create_pool() + return self.connection_instance async def create_connection(self) -> ConnectionT: """Create a database connection.""" diff --git a/sqlspec/extensions/adk/__init__.py b/sqlspec/extensions/adk/__init__.py index f04827267..11799362e 100644 --- a/sqlspec/extensions/adk/__init__.py +++ b/sqlspec/extensions/adk/__init__.py @@ -17,7 +17,7 @@ from sqlspec.extensions.adk import SQLSpecSessionService config = AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, extension_config={ "adk": { "session_table": "my_sessions", diff --git a/sqlspec/extensions/adk/service.py b/sqlspec/extensions/adk/service.py index 22a945dc5..34d6d0771 100644 --- a/sqlspec/extensions/adk/service.py +++ b/sqlspec/extensions/adk/service.py @@ -34,7 +34,7 @@ class SQLSpecSessionService(BaseSessionService): from sqlspec.adapters.asyncpg.adk.store import AsyncpgADKStore from sqlspec.extensions.adk.service import SQLSpecSessionService - config = AsyncpgConfig(pool_config={"dsn": "postgresql://..."}) + config = AsyncpgConfig(connection_config={"dsn": "postgresql://..."}) store = AsyncpgADKStore(config) await store.create_tables() diff --git a/sqlspec/extensions/fastapi/extension.py b/sqlspec/extensions/fastapi/extension.py index bba7ea8aa..9b7e60aa0 100644 --- a/sqlspec/extensions/fastapi/extension.py +++ b/sqlspec/extensions/fastapi/extension.py @@ -32,7 +32,7 @@ class SQLSpecPlugin(_StarlettePlugin): sqlspec = SQLSpec() config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={ "starlette": { "commit_mode": "autocommit", diff --git a/sqlspec/extensions/flask/__init__.py b/sqlspec/extensions/flask/__init__.py index 3d6bc8bb5..70974f7b8 100644 --- a/sqlspec/extensions/flask/__init__.py +++ b/sqlspec/extensions/flask/__init__.py @@ -11,7 +11,7 @@ sqlspec = SQLSpec() config = SqliteConfig( - pool_config={"database": "app.db"}, + connection_config={"database": "app.db"}, extension_config={ "flask": { "commit_mode": "autocommit", diff --git a/sqlspec/extensions/flask/extension.py b/sqlspec/extensions/flask/extension.py index a8fafa4e9..fc30a7a3a 100644 --- a/sqlspec/extensions/flask/extension.py +++ b/sqlspec/extensions/flask/extension.py @@ -36,7 +36,7 @@ class SQLSpecPlugin: sqlspec = SQLSpec() config = SqliteConfig( - pool_config={"database": "app.db"}, + connection_config={"database": "app.db"}, extension_config={ "flask": { "commit_mode": 
"autocommit", diff --git a/sqlspec/extensions/litestar/config.py b/sqlspec/extensions/litestar/config.py index ddd5b5a39..9a466690f 100644 --- a/sqlspec/extensions/litestar/config.py +++ b/sqlspec/extensions/litestar/config.py @@ -14,7 +14,7 @@ class LitestarConfig(TypedDict): from sqlspec.adapters.oracledb import OracleAsyncConfig config = OracleAsyncConfig( - pool_config={"dsn": "oracle://localhost/XEPDB1"}, + connection_config={"dsn": "oracle://localhost/XEPDB1"}, extension_config={ "litestar": { "session_table": "my_sessions", @@ -52,7 +52,7 @@ class LitestarConfig(TypedDict): Examples: Oracle with in-memory enabled: config = OracleAsyncConfig( - pool_config={"dsn": "oracle://..."}, + connection_config={"dsn": "oracle://..."}, extension_config={ "litestar": { "in_memory": True diff --git a/sqlspec/extensions/litestar/plugin.py b/sqlspec/extensions/litestar/plugin.py index ed4ba985f..dfc720420 100644 --- a/sqlspec/extensions/litestar/plugin.py +++ b/sqlspec/extensions/litestar/plugin.py @@ -194,7 +194,7 @@ class SQLSpecPlugin(InitPluginProtocol, CLIPlugin): Example: config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/db"}, + connection_config={"dsn": "postgresql://localhost/db"}, extension_config={ "litestar": { "session_table": "custom_sessions" # Optional custom table name diff --git a/sqlspec/extensions/litestar/store.py b/sqlspec/extensions/litestar/store.py index 760d27a9c..977d3327d 100644 --- a/sqlspec/extensions/litestar/store.py +++ b/sqlspec/extensions/litestar/store.py @@ -44,7 +44,7 @@ class BaseSQLSpecStore(ABC, Generic[ConfigT]): from sqlspec.adapters.asyncpg.litestar.store import AsyncpgStore config = AsyncpgConfig( - pool_config={"dsn": "postgresql://..."}, + connection_config={"dsn": "postgresql://..."}, extension_config={"litestar": {"session_table": "my_sessions"}} ) store = AsyncpgStore(config) diff --git a/sqlspec/extensions/starlette/extension.py b/sqlspec/extensions/starlette/extension.py index 8344e6baf..e60d80d38 100644 --- a/sqlspec/extensions/starlette/extension.py +++ b/sqlspec/extensions/starlette/extension.py @@ -39,7 +39,7 @@ class SQLSpecPlugin: sqlspec = SQLSpec() sqlspec.add_config(AsyncpgConfig( bind_key="default", - pool_config={"dsn": "postgresql://localhost/mydb"}, + connection_config={"dsn": "postgresql://localhost/mydb"}, extension_config={ "starlette": { "commit_mode": "autocommit", diff --git a/sqlspec/utils/config_resolver.py b/sqlspec/utils/config_resolver.py index 59adb2856..c441c736c 100644 --- a/sqlspec/utils/config_resolver.py +++ b/sqlspec/utils/config_resolver.py @@ -145,7 +145,7 @@ def _is_valid_config(config: Any) -> bool: migration_config = getattr(config, "migration_config", None) if migration_config is not None: - if hasattr(config, "pool_config"): + if hasattr(config, "connection_config"): return True if hasattr(config, "database_url") and hasattr(config, "bind_key"): return True diff --git a/tests/integration/test_adapters/test_aiosqlite/conftest.py b/tests/integration/test_adapters/test_aiosqlite/conftest.py index fcd2be58b..dbd88a9e6 100644 --- a/tests/integration/test_adapters/test_aiosqlite/conftest.py +++ b/tests/integration/test_adapters/test_aiosqlite/conftest.py @@ -63,7 +63,7 @@ async def aiosqlite_config_file() -> AsyncGenerator[AiosqliteConfig, None]: with tempfile.NamedTemporaryFile(suffix=".db", delete=False) as tmp: db_path = tmp.name - config = AiosqliteConfig(pool_config={"database": db_path, "pool_size": 5}) + config = AiosqliteConfig(connection_config={"database": db_path, "pool_size": 5}) try: 
yield config diff --git a/tests/integration/test_adapters/test_aiosqlite/test_connection.py b/tests/integration/test_adapters/test_aiosqlite/test_connection.py index 86bfdca38..f73b8c2dc 100644 --- a/tests/integration/test_adapters/test_aiosqlite/test_connection.py +++ b/tests/integration/test_adapters/test_aiosqlite/test_connection.py @@ -134,13 +134,13 @@ async def test_provide_connection_direct() -> None: await config.close_pool() -async def test_config_with_pool_config(tmp_path: Path) -> None: - """Test that AiosqliteConfig correctly accepts pool_config parameter.""" +async def test_config_with_connection_config(tmp_path: Path) -> None: + """Test that AiosqliteConfig correctly accepts connection_config parameter.""" db_path = tmp_path / f"test_{uuid4().hex}.db" - pool_config = {"database": str(db_path), "timeout": 10.0, "isolation_level": None, "check_same_thread": False} + connection_config = {"database": str(db_path), "timeout": 10.0, "isolation_level": None, "check_same_thread": False} - config = AiosqliteConfig(pool_config=pool_config) + config = AiosqliteConfig(connection_config=connection_config) try: connection_config = config._get_connection_config_dict() @@ -161,14 +161,14 @@ async def test_config_with_pool_config(tmp_path: Path) -> None: async def test_config_with_kwargs_override(tmp_path: Path) -> None: - """Test that kwargs properly override pool_config values.""" + """Test that kwargs properly override connection_config values.""" - pool_config = {"database": "base.db", "timeout": 5.0} + connection_config = {"database": "base.db", "timeout": 5.0} db_path = tmp_path / f"override_{uuid4().hex}.db" - # Override pool_config with specific test values - test_pool_config = {**pool_config, "database": str(db_path), "timeout": 15.0} - config = AiosqliteConfig(pool_config=test_pool_config) + # Override connection_config with specific test values + test_connection_config = {**connection_config, "database": str(db_path), "timeout": 15.0} + config = AiosqliteConfig(connection_config=test_connection_config) try: connection_config = config._get_connection_config_dict() @@ -225,7 +225,7 @@ def hook(ctx: dict[str, Any]) -> None: async def test_config_memory_database_conversion() -> None: """Test that :memory: databases are converted to shared memory.""" - config = AiosqliteConfig(pool_config={"database": ":memory:"}) + config = AiosqliteConfig(connection_config={"database": ":memory:"}) try: connection_config = config._get_connection_config_dict() @@ -264,9 +264,9 @@ async def test_config_parameter_preservation(tmp_path: Path) -> None: """Test that aiosqlite config properly preserves parameters.""" db_path = tmp_path / "parameter_test.db" - pool_config = {"database": str(db_path), "isolation_level": None, "cached_statements": 100} + connection_config = {"database": str(db_path), "isolation_level": None, "cached_statements": 100} - config = AiosqliteConfig(pool_config=pool_config) + config = AiosqliteConfig(connection_config=connection_config) try: connection_config = config._get_connection_config_dict() diff --git a/tests/integration/test_adapters/test_aiosqlite/test_extensions/test_litestar/test_numpy_serialization.py b/tests/integration/test_adapters/test_aiosqlite/test_extensions/test_litestar/test_numpy_serialization.py index 649986024..d3443cfa7 100644 --- a/tests/integration/test_adapters/test_aiosqlite/test_extensions/test_litestar/test_numpy_serialization.py +++ b/tests/integration/test_adapters/test_aiosqlite/test_extensions/test_litestar/test_numpy_serialization.py @@ -30,7 +30,7 
@@ def test_litestar_numpy_encoder_registered() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, extension_config={"litestar": {"commit_mode": "manual"}} + connection_config={"database": tmp.name}, extension_config={"litestar": {"commit_mode": "manual"}} ) sql.add_config(config) @@ -45,7 +45,7 @@ def test_litestar_numpy_decoder_registered() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, extension_config={"litestar": {"commit_mode": "manual"}} + connection_config={"database": tmp.name}, extension_config={"litestar": {"commit_mode": "manual"}} ) sql.add_config(config) @@ -60,7 +60,7 @@ def test_litestar_numpy_response_encoding() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, extension_config={"litestar": {"commit_mode": "manual"}} + connection_config={"database": tmp.name}, extension_config={"litestar": {"commit_mode": "manual"}} ) sql.add_config(config) @@ -87,7 +87,7 @@ def test_litestar_numpy_request_decoding() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, extension_config={"litestar": {"commit_mode": "manual"}} + connection_config={"database": tmp.name}, extension_config={"litestar": {"commit_mode": "manual"}} ) sql.add_config(config) @@ -113,7 +113,7 @@ def test_litestar_numpy_round_trip() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, extension_config={"litestar": {"commit_mode": "manual"}} + connection_config={"database": tmp.name}, extension_config={"litestar": {"commit_mode": "manual"}} ) sql.add_config(config) @@ -147,7 +147,7 @@ def test_litestar_numpy_multidimensional_arrays() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, extension_config={"litestar": {"commit_mode": "manual"}} + connection_config={"database": tmp.name}, extension_config={"litestar": {"commit_mode": "manual"}} ) sql.add_config(config) @@ -172,7 +172,7 @@ def test_litestar_numpy_empty_array() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, extension_config={"litestar": {"commit_mode": "manual"}} + connection_config={"database": tmp.name}, extension_config={"litestar": {"commit_mode": "manual"}} ) sql.add_config(config) @@ -197,7 +197,7 @@ def test_litestar_numpy_various_dtypes() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, extension_config={"litestar": {"commit_mode": "manual"}} + connection_config={"database": tmp.name}, extension_config={"litestar": {"commit_mode": "manual"}} ) sql.add_config(config) @@ -231,7 +231,7 @@ def test_litestar_numpy_large_embedding_vector() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, extension_config={"litestar": {"commit_mode": "manual"}} + connection_config={"database": tmp.name}, 
extension_config={"litestar": {"commit_mode": "manual"}} ) sql.add_config(config) diff --git a/tests/integration/test_adapters/test_aiosqlite/test_extensions/test_litestar/test_store.py b/tests/integration/test_adapters/test_aiosqlite/test_extensions/test_litestar/test_store.py index 8ba7a321b..7e3b0b159 100644 --- a/tests/integration/test_adapters/test_aiosqlite/test_extensions/test_litestar/test_store.py +++ b/tests/integration/test_adapters/test_aiosqlite/test_extensions/test_litestar/test_store.py @@ -16,7 +16,7 @@ async def aiosqlite_store() -> "AsyncGenerator[AiosqliteStore, None]": """Create AioSQLite store with in-memory database.""" config = AiosqliteConfig( - pool_config={"database": ":memory:"}, extension_config={"litestar": {"session_table": "test_sessions"}} + connection_config={"database": ":memory:"}, extension_config={"litestar": {"session_table": "test_sessions"}} ) store = AiosqliteStore(config) await store.create_table() diff --git a/tests/integration/test_adapters/test_aiosqlite/test_migrations.py b/tests/integration/test_adapters/test_aiosqlite/test_migrations.py index ecd024e1b..ae61082e8 100644 --- a/tests/integration/test_adapters/test_aiosqlite/test_migrations.py +++ b/tests/integration/test_adapters/test_aiosqlite/test_migrations.py @@ -21,7 +21,7 @@ async def test_aiosqlite_migration_full_workflow(tmp_path: Path) -> None: db_path = tmp_path / "test.db" config = AiosqliteConfig( - pool_config={"database": str(db_path)}, + connection_config={"database": str(db_path)}, migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, ) commands = AsyncMigrationCommands(config) @@ -74,7 +74,7 @@ def down(): result = await driver.execute(f"SELECT name FROM sqlite_master WHERE type='table' AND name='{users_table}'") assert len(result.data) == 0 finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -90,7 +90,7 @@ async def test_aiosqlite_multiple_migrations_workflow(tmp_path: Path) -> None: db_path = tmp_path / "test.db" config = AiosqliteConfig( - pool_config={"database": str(db_path)}, + connection_config={"database": str(db_path)}, migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, ) commands = AsyncMigrationCommands(config) @@ -178,7 +178,7 @@ def down(): table_names = [t["name"] for t in tables_result.data if not t["name"].startswith("sqlspec_")] assert len(table_names) == 0 finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -193,7 +193,7 @@ async def test_aiosqlite_migration_current_command(tmp_path: Path) -> None: db_path = tmp_path / "test.db" config = AiosqliteConfig( - pool_config={"database": str(db_path)}, + connection_config={"database": str(db_path)}, migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, ) commands = AsyncMigrationCommands(config) @@ -222,7 +222,7 @@ def down(): await commands.current(verbose=True) finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -236,7 +236,7 @@ async def test_aiosqlite_migration_error_handling(tmp_path: Path) -> None: db_path = tmp_path / "test.db" config = AiosqliteConfig( - pool_config={"database": str(db_path)}, + connection_config={"database": str(db_path)}, migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, ) commands = AsyncMigrationCommands(config) @@ -269,7 +269,7 @@ def down(): except Exception as e: assert "no 
such" in str(e).lower() or "does not exist" in str(e).lower() finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -284,7 +284,7 @@ async def test_aiosqlite_migration_with_transactions(tmp_path: Path) -> None: db_path = tmp_path / "test.db" config = AiosqliteConfig( - pool_config={"database": str(db_path)}, + connection_config={"database": str(db_path)}, migration_config={"script_location": str(migration_dir), "version_table_name": migration_table}, ) commands = AsyncMigrationCommands(config) @@ -330,5 +330,5 @@ def down(): ) assert len(result.data) == 0 finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() diff --git a/tests/integration/test_adapters/test_aiosqlite/test_parameter_styles.py b/tests/integration/test_adapters/test_aiosqlite/test_parameter_styles.py index 8f832bdd0..8f3b1c40c 100644 --- a/tests/integration/test_adapters/test_aiosqlite/test_parameter_styles.py +++ b/tests/integration/test_adapters/test_aiosqlite/test_parameter_styles.py @@ -14,7 +14,7 @@ async def test_aiosqlite_none_parameters() -> None: """Test that None values in named parameters are handled correctly by AIOSQLite.""" - config = AiosqliteConfig(pool_config={"database": ":memory:"}) + config = AiosqliteConfig(connection_config={"database": ":memory:"}) async with config.provide_session() as driver: # Create test table @@ -81,7 +81,7 @@ async def test_aiosqlite_none_parameters() -> None: async def test_aiosqlite_none_parameters_qmark_style() -> None: """Test None values with QMARK (?) parameter style - AIOSQLite default.""" - config = AiosqliteConfig(pool_config={"database": ":memory:"}) + config = AiosqliteConfig(connection_config={"database": ":memory:"}) async with config.provide_session() as driver: # Create test table @@ -113,7 +113,7 @@ async def test_aiosqlite_none_parameters_qmark_style() -> None: async def test_aiosqlite_all_none_parameters() -> None: """Test when all parameter values are None.""" - config = AiosqliteConfig(pool_config={"database": ":memory:"}) + config = AiosqliteConfig(connection_config={"database": ":memory:"}) async with config.provide_session() as driver: # Create test table @@ -155,7 +155,7 @@ async def test_aiosqlite_all_none_parameters() -> None: async def test_aiosqlite_none_with_execute_many() -> None: """Test None values work correctly with execute_many.""" - config = AiosqliteConfig(pool_config={"database": ":memory:"}) + config = AiosqliteConfig(connection_config={"database": ":memory:"}) async with config.provide_session() as driver: # Create test table @@ -196,7 +196,7 @@ async def test_aiosqlite_none_with_execute_many() -> None: async def test_aiosqlite_none_in_where_clause() -> None: """Test None values in WHERE clauses work correctly.""" - config = AiosqliteConfig(pool_config={"database": ":memory:"}) + config = AiosqliteConfig(connection_config={"database": ":memory:"}) async with config.provide_session() as driver: # Create test table @@ -236,7 +236,7 @@ async def test_aiosqlite_none_in_where_clause() -> None: async def test_aiosqlite_none_complex_parameter_scenarios() -> None: """Test complex scenarios with None parameters that might cause issues.""" - config = AiosqliteConfig(pool_config={"database": ":memory:"}) + config = AiosqliteConfig(connection_config={"database": ":memory:"}) async with config.provide_session() as driver: # Create test table @@ -296,7 +296,7 @@ async def test_aiosqlite_none_complex_parameter_scenarios() -> None: async def 
test_aiosqlite_none_parameter_edge_cases() -> None: """Test edge cases that might reveal parameter handling bugs.""" - config = AiosqliteConfig(pool_config={"database": ":memory:"}) + config = AiosqliteConfig(connection_config={"database": ":memory:"}) async with config.provide_session() as driver: # Test 1: Empty parameter list with None @@ -346,7 +346,7 @@ async def test_aiosqlite_parameter_count_mismatch_with_none() -> None: This test verifies the bug mentioned in the original issue where parameter count mismatches might be missed when None values are present. """ - config = AiosqliteConfig(pool_config={"database": ":memory:"}) + config = AiosqliteConfig(connection_config={"database": ":memory:"}) async with config.provide_session() as driver: await driver.execute("CREATE TABLE test_param_count (col1 TEXT, col2 INTEGER)") diff --git a/tests/integration/test_adapters/test_aiosqlite/test_pooling.py b/tests/integration/test_adapters/test_aiosqlite/test_pooling.py index 954e6079f..640d3d0da 100644 --- a/tests/integration/test_adapters/test_aiosqlite/test_pooling.py +++ b/tests/integration/test_adapters/test_aiosqlite/test_pooling.py @@ -18,7 +18,12 @@ async def test_shared_memory_pooling() -> None: """Test that shared memory databases allow pooling.""" config = AiosqliteConfig( - pool_config={"database": "file::memory:?cache=shared", "uri": True, "pool_min_size": 2, "pool_max_size": 5} + connection_config={ + "database": "file::memory:?cache=shared", + "uri": True, + "pool_min_size": 2, + "pool_max_size": 5, + } ) try: @@ -53,7 +58,7 @@ async def test_shared_memory_pooling() -> None: async def test_regular_memory_auto_converted_pooling() -> None: """Test that regular memory databases are auto-converted and pooling works.""" - config = AiosqliteConfig(pool_config={"database": ":memory:", "pool_min_size": 5, "pool_max_size": 10}) + config = AiosqliteConfig(connection_config={"database": ":memory:", "pool_min_size": 5, "pool_max_size": 10}) try: assert config._get_connection_config_dict()["database"] == "file::memory:?cache=shared" @@ -91,7 +96,7 @@ async def test_file_database_pooling_enabled() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=False) as tmp: db_path = tmp.name - config = AiosqliteConfig(pool_config={"database": db_path, "pool_min_size": 3, "pool_max_size": 8}) + config = AiosqliteConfig(connection_config={"database": db_path, "pool_min_size": 3, "pool_max_size": 8}) try: async with config.provide_session() as session1: diff --git a/tests/integration/test_adapters/test_asyncmy/conftest.py b/tests/integration/test_adapters/test_asyncmy/conftest.py index 6e5a446e6..6e8523918 100644 --- a/tests/integration/test_adapters/test_asyncmy/conftest.py +++ b/tests/integration/test_adapters/test_asyncmy/conftest.py @@ -12,7 +12,7 @@ async def asyncmy_config(mysql_service: MySQLService) -> AsyncmyConfig: """Create AsyncMy configuration for testing.""" return AsyncmyConfig( - pool_config={ + connection_config={ "host": mysql_service.host, "port": mysql_service.port, "user": mysql_service.user, diff --git a/tests/integration/test_adapters/test_asyncmy/test_asyncmy_features.py b/tests/integration/test_adapters/test_asyncmy/test_asyncmy_features.py index 60c8e4199..51170c14e 100644 --- a/tests/integration/test_adapters/test_asyncmy/test_asyncmy_features.py +++ b/tests/integration/test_adapters/test_asyncmy/test_asyncmy_features.py @@ -23,7 +23,7 @@ async def asyncmy_pooled_session(mysql_service: MySQLService) -> AsyncGenerator[AsyncmyDriver, None]: """Create AsyncMy session with 
connection pooling.""" config = AsyncmyConfig( - pool_config={ + connection_config={ "host": mysql_service.host, "port": mysql_service.port, "user": mysql_service.user, diff --git a/tests/integration/test_adapters/test_asyncmy/test_config.py b/tests/integration/test_adapters/test_asyncmy/test_config.py index 400baadfc..88cdef78b 100644 --- a/tests/integration/test_adapters/test_asyncmy/test_config.py +++ b/tests/integration/test_adapters/test_asyncmy/test_config.py @@ -36,21 +36,21 @@ def test_asyncmy_typed_dict_structure() -> None: def test_asyncmy_config_basic_creation() -> None: """Test Asyncmy config creation with basic parameters.""" - pool_config = { + connection_config = { "host": "localhost", "port": 3306, "user": "test_user", "password": "test_password", "database": "test_db", } - config = AsyncmyConfig(pool_config=pool_config) - assert config.pool_config["host"] == "localhost" - assert config.pool_config["port"] == 3306 - assert config.pool_config["user"] == "test_user" - assert config.pool_config["password"] == "test_password" - assert config.pool_config["database"] == "test_db" - - pool_config_full = { + config = AsyncmyConfig(connection_config=connection_config) + assert config.connection_config["host"] == "localhost" + assert config.connection_config["port"] == 3306 + assert config.connection_config["user"] == "test_user" + assert config.connection_config["password"] == "test_password" + assert config.connection_config["database"] == "test_db" + + connection_config_full = { "host": "localhost", "port": 3306, "user": "test_user", @@ -58,44 +58,44 @@ def test_asyncmy_config_basic_creation() -> None: "database": "test_db", "custom": "value", } - config_full = AsyncmyConfig(pool_config=pool_config_full) - assert config_full.pool_config["host"] == "localhost" - assert config_full.pool_config["port"] == 3306 - assert config_full.pool_config["user"] == "test_user" - assert config_full.pool_config["password"] == "test_password" - assert config_full.pool_config["database"] == "test_db" - assert config_full.pool_config["custom"] == "value" + config_full = AsyncmyConfig(connection_config=connection_config_full) + assert config_full.connection_config["host"] == "localhost" + assert config_full.connection_config["port"] == 3306 + assert config_full.connection_config["user"] == "test_user" + assert config_full.connection_config["password"] == "test_password" + assert config_full.connection_config["database"] == "test_db" + assert config_full.connection_config["custom"] == "value" def test_asyncmy_config_initialization() -> None: """Test Asyncmy config initialization.""" - pool_config = { + connection_config = { "host": "localhost", "port": 3306, "user": "test_user", "password": "test_password", "database": "test_db", } - config = AsyncmyConfig(pool_config=pool_config) + config = AsyncmyConfig(connection_config=connection_config) assert isinstance(config.statement_config, StatementConfig) custom_statement_config = StatementConfig() - config = AsyncmyConfig(pool_config=pool_config, statement_config=custom_statement_config) + config = AsyncmyConfig(connection_config=connection_config, statement_config=custom_statement_config) assert config.statement_config is custom_statement_config async def test_asyncmy_config_provide_session(mysql_service: MySQLService) -> None: """Test Asyncmy config provide_session context manager.""" - pool_config = { + connection_config = { "host": mysql_service.host, "port": mysql_service.port, "user": mysql_service.user, "password": mysql_service.password, 
"database": mysql_service.db, } - config = AsyncmyConfig(pool_config=pool_config) + config = AsyncmyConfig(connection_config=connection_config) async with config.provide_session() as session: assert isinstance(session, AsyncmyDriver) @@ -106,41 +106,41 @@ async def test_asyncmy_config_provide_session(mysql_service: MySQLService) -> No def test_asyncmy_config_driver_type() -> None: """Test Asyncmy config driver_type property.""" - pool_config = { + connection_config = { "host": "localhost", "port": 3306, "user": "test_user", "password": "test_password", "database": "test_db", } - config = AsyncmyConfig(pool_config=pool_config) + config = AsyncmyConfig(connection_config=connection_config) assert config.driver_type is AsyncmyDriver def test_asyncmy_config_is_async() -> None: """Test Asyncmy config is_async attribute.""" - pool_config = { + connection_config = { "host": "localhost", "port": 3306, "user": "test_user", "password": "test_password", "database": "test_db", } - config = AsyncmyConfig(pool_config=pool_config) + config = AsyncmyConfig(connection_config=connection_config) assert config.is_async is True assert AsyncmyConfig.is_async is True def test_asyncmy_config_supports_connection_pooling() -> None: """Test Asyncmy config supports_connection_pooling attribute.""" - pool_config = { + connection_config = { "host": "localhost", "port": 3306, "user": "test_user", "password": "test_password", "database": "test_db", } - config = AsyncmyConfig(pool_config=pool_config) + config = AsyncmyConfig(connection_config=connection_config) assert config.supports_connection_pooling is True assert AsyncmyConfig.supports_connection_pooling is True @@ -181,7 +181,7 @@ def custom_deserializer(data: str) -> object: features: AsyncmyDriverFeatures = {"json_serializer": custom_serializer, "json_deserializer": custom_deserializer} - config = AsyncmyConfig(pool_config={"host": "localhost", "port": 3306}, driver_features=features) + config = AsyncmyConfig(connection_config={"host": "localhost", "port": 3306}, driver_features=features) assert config.driver_features["json_serializer"] is custom_serializer assert config.driver_features["json_deserializer"] is custom_deserializer @@ -189,7 +189,7 @@ def custom_deserializer(data: str) -> object: def test_asyncmy_config_with_empty_driver_features() -> None: """Test AsyncmyConfig with empty driver_features still provides defaults.""" - config = AsyncmyConfig(pool_config={"host": "localhost", "port": 3306}, driver_features={}) + config = AsyncmyConfig(connection_config={"host": "localhost", "port": 3306}, driver_features={}) assert "json_serializer" in config.driver_features assert "json_deserializer" in config.driver_features @@ -199,7 +199,7 @@ def test_asyncmy_config_with_empty_driver_features() -> None: def test_asyncmy_config_without_driver_features() -> None: """Test AsyncmyConfig without driver_features provides sensible defaults.""" - config = AsyncmyConfig(pool_config={"host": "localhost", "port": 3306}) + config = AsyncmyConfig(connection_config={"host": "localhost", "port": 3306}) assert "json_serializer" in config.driver_features assert "json_deserializer" in config.driver_features @@ -214,7 +214,7 @@ def custom_serializer(data: object) -> str: return str(data) config = AsyncmyConfig( - pool_config={"host": "localhost", "port": 3306}, driver_features={"json_serializer": custom_serializer} + connection_config={"host": "localhost", "port": 3306}, driver_features={"json_serializer": custom_serializer} ) assert config.driver_features["json_serializer"] is 
custom_serializer diff --git a/tests/integration/test_adapters/test_asyncmy/test_driver.py b/tests/integration/test_adapters/test_asyncmy/test_driver.py index 2dda21a37..3368a45b7 100644 --- a/tests/integration/test_adapters/test_asyncmy/test_driver.py +++ b/tests/integration/test_adapters/test_asyncmy/test_driver.py @@ -226,7 +226,7 @@ def tracking_deserializer(value: str | bytes) -> object: return decoded config = AsyncmyConfig( - pool_config={ + connection_config={ "host": mysql_service.host, "port": mysql_service.port, "user": mysql_service.user, diff --git a/tests/integration/test_adapters/test_asyncmy/test_exceptions.py b/tests/integration/test_adapters/test_asyncmy/test_exceptions.py index 52e5e84e3..75c978ce9 100644 --- a/tests/integration/test_adapters/test_asyncmy/test_exceptions.py +++ b/tests/integration/test_adapters/test_asyncmy/test_exceptions.py @@ -21,7 +21,7 @@ async def asyncmy_exception_session(mysql_service: MySQLService) -> AsyncGenerator[AsyncmyDriver, None]: """Create an asyncmy session for exception testing.""" config = AsyncmyConfig( - pool_config={ + connection_config={ "host": mysql_service.host, "port": mysql_service.port, "user": mysql_service.user, diff --git a/tests/integration/test_adapters/test_asyncmy/test_extensions/test_adk/conftest.py b/tests/integration/test_adapters/test_asyncmy/test_extensions/test_adk/conftest.py index 1778b4f31..7514f3425 100644 --- a/tests/integration/test_adapters/test_asyncmy/test_extensions/test_adk/conftest.py +++ b/tests/integration/test_adapters/test_asyncmy/test_extensions/test_adk/conftest.py @@ -24,7 +24,7 @@ async def asyncmy_adk_store(mysql_service: MySQLService) -> "AsyncGenerator[Asyn Tables are created before test and cleaned up after. """ config = AsyncmyConfig( - pool_config={ + connection_config={ "host": mysql_service.host, "port": mysql_service.port, "user": mysql_service.user, @@ -63,7 +63,7 @@ async def asyncmy_adk_store_with_fk(mysql_service: MySQLService) -> "AsyncGenera Tests multi-tenant isolation and CASCADE behavior. 
""" config = AsyncmyConfig( - pool_config={ + connection_config={ "host": mysql_service.host, "port": mysql_service.port, "user": mysql_service.user, diff --git a/tests/integration/test_adapters/test_asyncmy/test_extensions/test_litestar/test_store.py b/tests/integration/test_adapters/test_asyncmy/test_extensions/test_litestar/test_store.py index 50ec244b1..960a3f8f6 100644 --- a/tests/integration/test_adapters/test_asyncmy/test_extensions/test_litestar/test_store.py +++ b/tests/integration/test_adapters/test_asyncmy/test_extensions/test_litestar/test_store.py @@ -17,7 +17,7 @@ async def asyncmy_store(mysql_service: MySQLService) -> "AsyncGenerator[AsyncmyStore, None]": """Create AsyncMy store with test database.""" config = AsyncmyConfig( - pool_config={ + connection_config={ "host": mysql_service.host, "port": mysql_service.port, "user": mysql_service.user, @@ -36,7 +36,7 @@ async def asyncmy_store(mysql_service: MySQLService) -> "AsyncGenerator[AsyncmyS pass finally: try: - if config.pool_instance: + if config.connection_instance: await config.close_pool() except Exception: pass diff --git a/tests/integration/test_adapters/test_asyncmy/test_migrations.py b/tests/integration/test_adapters/test_asyncmy/test_migrations.py index 70a4773f3..362e3ca78 100644 --- a/tests/integration/test_adapters/test_asyncmy/test_migrations.py +++ b/tests/integration/test_adapters/test_asyncmy/test_migrations.py @@ -21,7 +21,7 @@ async def test_asyncmy_migration_full_workflow(tmp_path: Path, mysql_service: My migration_dir = tmp_path / "migrations" config = AsyncmyConfig( - pool_config={ + connection_config={ "host": mysql_service.host, "port": mysql_service.port, "user": mysql_service.user, @@ -89,7 +89,7 @@ def down(): ) assert len(result.data) == 0 finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -104,7 +104,7 @@ async def test_asyncmy_multiple_migrations_workflow(tmp_path: Path, mysql_servic migration_dir = tmp_path / "migrations" config = AsyncmyConfig( - pool_config={ + connection_config={ "host": mysql_service.host, "port": mysql_service.port, "user": mysql_service.user, @@ -208,7 +208,7 @@ def down(): ) assert len(users_result.data) == 0 finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -222,7 +222,7 @@ async def test_asyncmy_migration_current_command(tmp_path: Path, mysql_service: migration_dir = tmp_path / "migrations" config = AsyncmyConfig( - pool_config={ + connection_config={ "host": mysql_service.host, "port": mysql_service.port, "user": mysql_service.user, @@ -269,7 +269,7 @@ def down(): current_version = await commands.current() assert current_version is None or current_version == "base" finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -282,7 +282,7 @@ async def test_asyncmy_migration_error_handling(tmp_path: Path, mysql_service: M migration_dir = tmp_path / "migrations" config = AsyncmyConfig( - pool_config={ + connection_config={ "host": mysql_service.host, "port": mysql_service.port, "user": mysql_service.user, @@ -317,7 +317,7 @@ def down(): count = await driver.select_value(f"SELECT COUNT(*) FROM {migration_table}") assert count == 0, f"Expected empty migration table after failed migration, but found {count} records" finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -331,7 +331,7 @@ async def test_asyncmy_migration_with_transactions(tmp_path: Path, mysql_service migration_dir = tmp_path / 
"migrations" config = AsyncmyConfig( - pool_config={ + connection_config={ "host": mysql_service.host, "port": mysql_service.port, "user": mysql_service.user, @@ -401,5 +401,5 @@ def down(): result = await driver.execute(f"SELECT * FROM {users_table} WHERE name = 'Rollback User'") assert len(result.data) == 0 finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() diff --git a/tests/integration/test_adapters/test_asyncmy/test_parameter_styles.py b/tests/integration/test_adapters/test_asyncmy/test_parameter_styles.py index e1dfaf0ed..52093cfea 100644 --- a/tests/integration/test_adapters/test_asyncmy/test_parameter_styles.py +++ b/tests/integration/test_adapters/test_asyncmy/test_parameter_styles.py @@ -27,7 +27,7 @@ async def asyncmy_parameter_session(mysql_service: MySQLService) -> AsyncGenerator[AsyncmyDriver, None]: """Create an asyncmy session for parameter conversion testing.""" config = AsyncmyConfig( - pool_config={ + connection_config={ "host": mysql_service.host, "port": mysql_service.port, "user": mysql_service.user, diff --git a/tests/integration/test_adapters/test_asyncpg/conftest.py b/tests/integration/test_adapters/test_asyncpg/conftest.py index 2054fc872..f27c77bb9 100644 --- a/tests/integration/test_adapters/test_asyncpg/conftest.py +++ b/tests/integration/test_adapters/test_asyncpg/conftest.py @@ -8,7 +8,7 @@ @pytest.fixture(scope="function") -def asyncpg_pool_config(postgres_service: PostgresService) -> "dict[str, Any]": +def asyncpg_connection_config(postgres_service: PostgresService) -> "dict[str, Any]": """Base pool configuration for AsyncPG tests.""" return { @@ -21,14 +21,14 @@ def asyncpg_pool_config(postgres_service: PostgresService) -> "dict[str, Any]": @pytest.fixture(scope="function") -async def asyncpg_config(asyncpg_pool_config: "dict[str, Any]") -> "AsyncGenerator[AsyncpgConfig, None]": +async def asyncpg_config(asyncpg_connection_config: "dict[str, Any]") -> "AsyncGenerator[AsyncpgConfig, None]": """Provide an AsyncpgConfig instance with shared pool settings.""" - config = AsyncpgConfig(pool_config=dict(asyncpg_pool_config)) + config = AsyncpgConfig(connection_config=dict(asyncpg_connection_config)) try: yield config finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() diff --git a/tests/integration/test_adapters/test_asyncpg/test_cloud_connectors_integration.py b/tests/integration/test_adapters/test_asyncpg/test_cloud_connectors_integration.py index b34ae474d..c600e750c 100644 --- a/tests/integration/test_adapters/test_asyncpg/test_cloud_connectors_integration.py +++ b/tests/integration/test_adapters/test_asyncpg/test_cloud_connectors_integration.py @@ -41,7 +41,7 @@ async def test_cloud_sql_connection_basic() -> None: password = os.environ.get("GOOGLE_CLOUD_SQL_PASSWORD") config = AsyncpgConfig( - pool_config={"user": user, "password": password, "database": database, "min_size": 1, "max_size": 2}, + connection_config={"user": user, "password": password, "database": database, "min_size": 1, "max_size": 2}, driver_features={"enable_cloud_sql": True, "cloud_sql_instance": instance, "cloud_sql_enable_iam_auth": False}, ) @@ -64,7 +64,7 @@ async def test_cloud_sql_query_execution() -> None: password = os.environ.get("GOOGLE_CLOUD_SQL_PASSWORD") config = AsyncpgConfig( - pool_config={"user": user, "password": password, "database": database, "min_size": 1, "max_size": 2}, + connection_config={"user": user, "password": password, "database": database, "min_size": 1, "max_size": 2}, 
driver_features={"enable_cloud_sql": True, "cloud_sql_instance": instance, "cloud_sql_enable_iam_auth": False}, ) @@ -87,7 +87,7 @@ async def test_cloud_sql_iam_auth() -> None: database = os.environ.get("GOOGLE_CLOUD_SQL_DATABASE", "postgres") config = AsyncpgConfig( - pool_config={"user": user, "database": database, "min_size": 1, "max_size": 2}, + connection_config={"user": user, "database": database, "min_size": 1, "max_size": 2}, driver_features={"enable_cloud_sql": True, "cloud_sql_instance": instance, "cloud_sql_enable_iam_auth": True}, ) @@ -110,7 +110,7 @@ async def test_cloud_sql_private_ip() -> None: password = os.environ.get("GOOGLE_CLOUD_SQL_PASSWORD") config = AsyncpgConfig( - pool_config={"user": user, "password": password, "database": database, "min_size": 1, "max_size": 2}, + connection_config={"user": user, "password": password, "database": database, "min_size": 1, "max_size": 2}, driver_features={ "enable_cloud_sql": True, "cloud_sql_instance": instance, @@ -138,7 +138,7 @@ async def test_alloydb_connection_basic() -> None: password = os.environ.get("GOOGLE_ALLOYDB_PASSWORD") config = AsyncpgConfig( - pool_config={"user": user, "password": password, "database": database, "min_size": 1, "max_size": 2}, + connection_config={"user": user, "password": password, "database": database, "min_size": 1, "max_size": 2}, driver_features={ "enable_alloydb": True, "alloydb_instance_uri": instance_uri, @@ -165,7 +165,7 @@ async def test_alloydb_query_execution() -> None: password = os.environ.get("GOOGLE_ALLOYDB_PASSWORD") config = AsyncpgConfig( - pool_config={"user": user, "password": password, "database": database, "min_size": 1, "max_size": 2}, + connection_config={"user": user, "password": password, "database": database, "min_size": 1, "max_size": 2}, driver_features={ "enable_alloydb": True, "alloydb_instance_uri": instance_uri, @@ -192,7 +192,7 @@ async def test_alloydb_iam_auth() -> None: database = os.environ.get("GOOGLE_ALLOYDB_DATABASE", "postgres") config = AsyncpgConfig( - pool_config={"user": user, "database": database, "min_size": 1, "max_size": 2}, + connection_config={"user": user, "database": database, "min_size": 1, "max_size": 2}, driver_features={"enable_alloydb": True, "alloydb_instance_uri": instance_uri, "alloydb_enable_iam_auth": True}, ) diff --git a/tests/integration/test_adapters/test_asyncpg/test_connection.py b/tests/integration/test_adapters/test_asyncpg/test_connection.py index a431ba95f..0da052300 100644 --- a/tests/integration/test_adapters/test_asyncpg/test_connection.py +++ b/tests/integration/test_adapters/test_asyncpg/test_connection.py @@ -10,7 +10,7 @@ async def test_async_connection(postgres_service: PostgresService) -> None: """Test asyncpg connection components.""" # Test direct connection async_config = AsyncpgConfig( - pool_config={ + connection_config={ "dsn": f"postgres://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}", "min_size": 1, "max_size": 2, @@ -28,7 +28,7 @@ async def test_async_connection(postgres_service: PostgresService) -> None: # Test connection pool another_config = AsyncpgConfig( - pool_config={ + connection_config={ "dsn": f"postgres://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}", "min_size": 1, "max_size": 5, diff --git a/tests/integration/test_adapters/test_asyncpg/test_driver.py b/tests/integration/test_adapters/test_asyncpg/test_driver.py index 
ec6bbd592..ecf9e7c46 100644 --- a/tests/integration/test_adapters/test_asyncpg/test_driver.py +++ b/tests/integration/test_adapters/test_asyncpg/test_driver.py @@ -651,7 +651,7 @@ async def test_for_update_skip_locked(postgres_service: PostgresService) -> None from sqlspec import sql config = AsyncpgConfig( - pool_config={ + connection_config={ "dsn": f"postgres://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}", "min_size": 2, "max_size": 5, diff --git a/tests/integration/test_adapters/test_asyncpg/test_extensions/test_adk/conftest.py b/tests/integration/test_adapters/test_asyncpg/test_extensions/test_adk/conftest.py index 2f8e5dc02..fd46c57a9 100644 --- a/tests/integration/test_adapters/test_asyncpg/test_extensions/test_adk/conftest.py +++ b/tests/integration/test_adapters/test_asyncpg/test_extensions/test_adk/conftest.py @@ -25,7 +25,7 @@ async def asyncpg_adk_store(postgres_service: PostgresService) -> "AsyncGenerato Pool is properly closed to avoid threading issues. """ config = AsyncpgConfig( - pool_config={ + connection_config={ "host": postgres_service.host, "port": postgres_service.port, "user": postgres_service.user, @@ -46,7 +46,7 @@ async def asyncpg_adk_store(postgres_service: PostgresService) -> "AsyncGenerato await conn.execute("DROP TABLE IF EXISTS adk_events CASCADE") await conn.execute("DROP TABLE IF EXISTS adk_sessions CASCADE") finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() diff --git a/tests/integration/test_adapters/test_asyncpg/test_extensions/test_adk/test_owner_id_column.py b/tests/integration/test_adapters/test_asyncpg/test_extensions/test_adk/test_owner_id_column.py index d2c884e9f..1e4606c9c 100644 --- a/tests/integration/test_adapters/test_asyncpg/test_extensions/test_adk/test_owner_id_column.py +++ b/tests/integration/test_adapters/test_asyncpg/test_extensions/test_adk/test_owner_id_column.py @@ -26,7 +26,7 @@ def _make_config_with_owner_id( adk_settings["owner_id_column"] = owner_id_column return AsyncpgConfig( - pool_config={ + connection_config={ "host": postgres_service.host, "port": postgres_service.port, "user": postgres_service.user, @@ -47,7 +47,7 @@ async def asyncpg_config_for_fk(postgres_service: Any) -> "AsyncGenerator[Asyncp try: yield config finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -137,7 +137,7 @@ async def test_create_tables_with_owner_id_column( assert result["data_type"] == "integer" assert result["is_nullable"] == "NO" finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -162,7 +162,7 @@ async def test_create_session_with_owner_id(tenants_table: Any, postgres_service assert result is not None assert result["tenant_id"] == 1 finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -177,7 +177,7 @@ async def test_create_session_without_owner_id_when_configured(tenants_table: An assert session["id"] == "session-1" finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -193,7 +193,7 @@ async def test_fk_constraint_enforcement_not_null(tenants_table: Any, postgres_s with pytest.raises(asyncpg.ForeignKeyViolationError): await store.create_session("session-invalid", "app-1", "user-1", {"data": "test"}, owner_id=999) finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -224,7 +224,7 @@ async def 
test_cascade_delete_behavior(tenants_table: Any, postgres_service: Any assert session2 is None assert session3 is not None finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -246,7 +246,7 @@ async def test_nullable_owner_id_column(tenants_table: Any, postgres_service: An assert result is not None assert result["tenant_id"] is None finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -272,7 +272,7 @@ async def test_set_null_on_delete_behavior(tenants_table: Any, postgres_service: assert result is not None assert result["tenant_id"] is None finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -298,7 +298,7 @@ async def test_uuid_owner_id_column(users_table: Any, postgres_service: Any) -> assert result is not None assert result["account_id"] == user_uuid finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -316,7 +316,7 @@ async def test_deferrable_initially_deferred_fk(tenants_table: Any, postgres_ser assert session is not None finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -350,7 +350,7 @@ async def test_owner_id_column_name_property(tenants_table: Any, postgres_servic assert store.owner_id_column_name == "tenant_id" assert store.owner_id_column_ddl == "tenant_id INTEGER NOT NULL REFERENCES tenants(id)" finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -379,7 +379,7 @@ async def test_multiple_sessions_same_tenant(tenants_table: Any, postgres_servic assert len(result) == 5 assert [r["id"] for r in result] == [f"session-{i}" for i in range(5)] finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -407,5 +407,5 @@ async def test_owner_id_with_custom_table_names(tenants_table: Any, postgres_ser await conn.execute("DROP TABLE IF EXISTS custom_events CASCADE") await conn.execute("DROP TABLE IF EXISTS custom_sessions CASCADE") finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() diff --git a/tests/integration/test_adapters/test_asyncpg/test_migrations.py b/tests/integration/test_adapters/test_asyncpg/test_migrations.py index 30151304f..746523cab 100644 --- a/tests/integration/test_adapters/test_asyncpg/test_migrations.py +++ b/tests/integration/test_adapters/test_asyncpg/test_migrations.py @@ -16,7 +16,7 @@ async def test_asyncpg_migration_full_workflow(tmp_path: Path, postgres_service: migration_dir = tmp_path / "migrations" config = AsyncpgConfig( - pool_config={ + connection_config={ "host": postgres_service.host, "port": postgres_service.port, "user": postgres_service.user, @@ -79,7 +79,7 @@ def down(): ) assert len(result.data) == 0 finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -88,7 +88,7 @@ async def test_asyncpg_multiple_migrations_workflow(tmp_path: Path, postgres_ser migration_dir = tmp_path / "migrations" config = AsyncpgConfig( - pool_config={ + connection_config={ "host": postgres_service.host, "port": postgres_service.port, "user": postgres_service.user, @@ -183,7 +183,7 @@ def down(): ) assert len(users_result.data) == 0 finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -192,7 +192,7 @@ async def test_asyncpg_migration_current_command(tmp_path: Path, postgres_servic migration_dir = tmp_path / "migrations" config = 
AsyncpgConfig( - pool_config={ + connection_config={ "host": postgres_service.host, "port": postgres_service.port, "user": postgres_service.user, @@ -238,7 +238,7 @@ def down(): current_version = await commands.current() assert current_version is None or current_version == "base" finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -247,7 +247,7 @@ async def test_asyncpg_migration_error_handling(tmp_path: Path, postgres_service migration_dir = tmp_path / "migrations" config = AsyncpgConfig( - pool_config={ + connection_config={ "host": postgres_service.host, "port": postgres_service.port, "user": postgres_service.user, @@ -285,7 +285,7 @@ def down(): except Exception as e: assert "no such" in str(e).lower() or "does not exist" in str(e).lower() finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -294,7 +294,7 @@ async def test_asyncpg_migration_with_transactions(tmp_path: Path, postgres_serv migration_dir = tmp_path / "migrations" config = AsyncpgConfig( - pool_config={ + connection_config={ "host": postgres_service.host, "port": postgres_service.port, "user": postgres_service.user, @@ -361,7 +361,7 @@ def down(): result = await driver.execute("SELECT * FROM users WHERE name = 'Rollback User'") assert len(result.data) == 0 finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -370,7 +370,7 @@ async def test_asyncpg_config_migrate_up_method(tmp_path: Path, postgres_service migration_dir = tmp_path / "migrations" config = AsyncpgConfig( - pool_config={ + connection_config={ "host": postgres_service.host, "port": postgres_service.port, "user": postgres_service.user, @@ -415,7 +415,7 @@ def down(): ) assert len(result.data) == 1 finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -424,7 +424,7 @@ async def test_asyncpg_config_migrate_down_method(tmp_path: Path, postgres_servi migration_dir = tmp_path / "migrations" config = AsyncpgConfig( - pool_config={ + connection_config={ "host": postgres_service.host, "port": postgres_service.port, "user": postgres_service.user, @@ -476,7 +476,7 @@ def down(): ) assert len(result.data) == 0 finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -485,7 +485,7 @@ async def test_asyncpg_config_get_current_migration_method(tmp_path: Path, postg migration_dir = tmp_path / "migrations" config = AsyncpgConfig( - pool_config={ + connection_config={ "host": postgres_service.host, "port": postgres_service.port, "user": postgres_service.user, @@ -521,7 +521,7 @@ def down(): current_version = await config.get_current_migration() assert current_version == "0001" finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -530,7 +530,7 @@ async def test_asyncpg_config_create_migration_method(tmp_path: Path, postgres_s migration_dir = tmp_path / "migrations" config = AsyncpgConfig( - pool_config={ + connection_config={ "host": postgres_service.host, "port": postgres_service.port, "user": postgres_service.user, @@ -551,7 +551,7 @@ async def test_asyncpg_config_create_migration_method(tmp_path: Path, postgres_s assert len(migration_files) == 1 assert "add_users_table" in migration_files[0].name finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -560,7 +560,7 @@ async def test_asyncpg_config_stamp_migration_method(tmp_path: Path, postgres_se migration_dir = tmp_path / 
"migrations" config = AsyncpgConfig( - pool_config={ + connection_config={ "host": postgres_service.host, "port": postgres_service.port, "user": postgres_service.user, @@ -599,7 +599,7 @@ def down(): ) assert len(result.data) == 0 finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() @@ -608,7 +608,7 @@ async def test_asyncpg_config_fix_migrations_dry_run(tmp_path: Path, postgres_se migration_dir = tmp_path / "migrations" config = AsyncpgConfig( - pool_config={ + connection_config={ "host": postgres_service.host, "port": postgres_service.port, "user": postgres_service.user, @@ -644,5 +644,5 @@ def down(): sequential_file = migration_dir / "0001_timestamp_migration.py" assert not sequential_file.exists() finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() diff --git a/tests/integration/test_adapters/test_asyncpg/test_schema_migration.py b/tests/integration/test_adapters/test_asyncpg/test_schema_migration.py index b2e2eaba4..befb81fa2 100644 --- a/tests/integration/test_adapters/test_asyncpg/test_schema_migration.py +++ b/tests/integration/test_adapters/test_asyncpg/test_schema_migration.py @@ -12,7 +12,7 @@ def _create_config(postgres_service: PostgresService) -> AsyncpgConfig: """Create AsyncpgConfig from PostgresService fixture.""" return AsyncpgConfig( - pool_config={ + connection_config={ "host": postgres_service.host, "port": postgres_service.port, "user": postgres_service.user, diff --git a/tests/integration/test_adapters/test_duckdb/conftest.py b/tests/integration/test_adapters/test_duckdb/conftest.py index b03ae0442..54dc59315 100644 --- a/tests/integration/test_adapters/test_duckdb/conftest.py +++ b/tests/integration/test_adapters/test_duckdb/conftest.py @@ -11,7 +11,7 @@ def duckdb_basic_config() -> Generator[DuckDBConfig, None, None]: """Provide an in-memory DuckDB configuration.""" - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) try: yield config finally: diff --git a/tests/integration/test_adapters/test_duckdb/test_connection.py b/tests/integration/test_adapters/test_duckdb/test_connection.py index ff12364fb..69fb94ae7 100644 --- a/tests/integration/test_adapters/test_duckdb/test_connection.py +++ b/tests/integration/test_adapters/test_duckdb/test_connection.py @@ -44,7 +44,7 @@ def create_permissive_config(**kwargs: Any) -> DuckDBConfig: # Use a unique memory database identifier to avoid configuration conflicts connection_config["database"] = f":memory:{uuid4().hex}" - kwargs["pool_config"] = connection_config + kwargs["connection_config"] = connection_config return DuckDBConfig(**kwargs) @@ -120,7 +120,7 @@ def test_connection_with_data_processing_settings() -> None: def test_connection_with_instrumentation() -> None: """Test DuckDB connection with instrumentation configuration.""" - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) with config.provide_session() as session: result = session.execute("SELECT ? 
as test_value", (42)) @@ -138,7 +138,7 @@ def connection_hook(connection: DuckDBConnection) -> None: connection.execute("SET threads = 1") config = DuckDBConfig( - pool_config={"database": ":memory:"}, driver_features={"on_connection_create": connection_hook} + connection_config={"database": ":memory:"}, driver_features={"on_connection_create": connection_hook} ) registry = SQLSpec() @@ -169,9 +169,9 @@ def test_connection_read_only_mode() -> None: INSERT INTO test_readonly VALUES (1, 'test_data'); """) - if hasattr(setup_config, "pool_instance") and setup_config.pool_instance: - setup_config.pool_instance.close() - setup_config.pool_instance = None + if hasattr(setup_config, "connection_instance") and setup_config.connection_instance: + setup_config.connection_instance.close() + setup_config.connection_instance = None time.sleep(0.1) @@ -183,9 +183,9 @@ def test_connection_read_only_mode() -> None: assert result.data[0]["id"] == 1 assert result.data[0]["value"] == "test_data" - if hasattr(readonly_config, "pool_instance") and readonly_config.pool_instance: - readonly_config.pool_instance.close() - readonly_config.pool_instance = None + if hasattr(readonly_config, "connection_instance") and readonly_config.connection_instance: + readonly_config.connection_instance.close() + readonly_config.connection_instance = None finally: if os.path.exists(temp_db_path): @@ -282,12 +282,12 @@ def test_multiple_concurrent_connections() -> None: def test_config_with_pool_config_parameter(tmp_path: Path) -> None: - """Test that DuckDBConfig correctly accepts pool_config parameter.""" + """Test that DuckDBConfig correctly accepts connection_config parameter.""" db_path = tmp_path / "test.duckdb" - pool_config = {"database": str(db_path), "memory_limit": "256MB", "threads": 4} + connection_config = {"database": str(db_path), "memory_limit": "256MB", "threads": 4} - config = DuckDBConfig(pool_config=pool_config) + config = DuckDBConfig(connection_config=connection_config) try: connection_config = config._get_connection_config_dict() @@ -310,10 +310,10 @@ def test_config_with_pool_config_parameter(tmp_path: Path) -> None: def test_config_memory_database_shared_conversion() -> None: """Test that :memory: databases are converted to shared memory.""" - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) try: - assert config.pool_config["database"] == ":memory:shared_db" + assert config.connection_config["database"] == ":memory:shared_db" with config.provide_session() as session: result = session.execute("SELECT 'memory_test' as test") @@ -327,10 +327,10 @@ def test_config_memory_database_shared_conversion() -> None: def test_config_empty_database_conversion() -> None: """Test that empty database string is converted to shared memory.""" - config = DuckDBConfig(pool_config={"database": ""}) + config = DuckDBConfig(connection_config={"database": ""}) try: - assert config.pool_config["database"] == ":memory:shared_db" + assert config.connection_config["database"] == ":memory:shared_db" with config.provide_session() as session: result = session.execute("SELECT 'empty_test' as test") @@ -347,7 +347,7 @@ def test_config_default_database_shared() -> None: config = DuckDBConfig() try: - assert config.pool_config["database"] == ":memory:shared_db" + assert config.connection_config["database"] == ":memory:shared_db" with config.provide_session() as session: result = session.execute("SELECT 'default_test' as test") @@ -362,7 +362,7 @@ def 
test_config_consistency_with_other_adapters(tmp_path: Path) -> None: """Test that DuckDB config behaves consistently with SQLite/aiosqlite.""" db_path = tmp_path / "consistency_test.duckdb" - pool_config = { + connection_config = { "database": str(db_path), "memory_limit": "512MB", "threads": 2, @@ -370,7 +370,7 @@ def test_config_consistency_with_other_adapters(tmp_path: Path) -> None: "pool_max_size": 4, } - config = DuckDBConfig(pool_config=pool_config) + config = DuckDBConfig(connection_config=connection_config) try: connection_config = config._get_connection_config_dict() diff --git a/tests/integration/test_adapters/test_duckdb/test_driver_features.py b/tests/integration/test_adapters/test_duckdb/test_driver_features.py index ba3766cee..f5480f0b1 100644 --- a/tests/integration/test_adapters/test_duckdb/test_driver_features.py +++ b/tests/integration/test_adapters/test_duckdb/test_driver_features.py @@ -14,7 +14,7 @@ @pytest.fixture def duckdb_config() -> DuckDBConfig: """Create a basic DuckDB configuration.""" - return DuckDBConfig(pool_config={"database": ":memory:"}) + return DuckDBConfig(connection_config={"database": ":memory:"}) def test_default_uuid_conversion_enabled(duckdb_config: DuckDBConfig) -> None: @@ -39,7 +39,7 @@ def test_uuid_conversion_can_be_disabled() -> None: When disabled, UUID strings are passed as-is to DuckDB without conversion. DuckDB still returns UUID objects from UUID columns (native behavior). """ - config = DuckDBConfig(pool_config={"database": ":memory:"}, driver_features={"enable_uuid_conversion": False}) + config = DuckDBConfig(connection_config={"database": ":memory:"}, driver_features={"enable_uuid_conversion": False}) try: with config.provide_session() as session: session.execute("DROP TABLE IF EXISTS test") @@ -62,7 +62,7 @@ def test_custom_json_serializer_for_dict() -> None: def custom_json(obj: dict) -> str: return msgspec.json.encode(obj).decode("utf-8") - config = DuckDBConfig(pool_config={"database": ":memory:"}, driver_features={"json_serializer": custom_json}) + config = DuckDBConfig(connection_config={"database": ":memory:"}, driver_features={"json_serializer": custom_json}) try: with config.provide_session() as session: session.execute("DROP TABLE IF EXISTS test") @@ -82,7 +82,7 @@ def test_custom_json_serializer_for_list() -> None: def custom_json(obj: list) -> str: return msgspec.json.encode(obj).decode("utf-8") - config = DuckDBConfig(pool_config={"database": ":memory:"}, driver_features={"json_serializer": custom_json}) + config = DuckDBConfig(connection_config={"database": ":memory:"}, driver_features={"json_serializer": custom_json}) try: with config.provide_session() as session: session.execute("DROP TABLE IF EXISTS test") @@ -119,7 +119,7 @@ def custom_json(obj: dict | list) -> str: return json.dumps(obj, separators=(",", ":")) config = DuckDBConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, driver_features={"json_serializer": custom_json, "enable_uuid_conversion": False}, ) try: @@ -143,7 +143,7 @@ def test_driver_features_passed_to_driver() -> None: custom_json = json.dumps config = DuckDBConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, driver_features={"json_serializer": custom_json, "enable_uuid_conversion": False}, ) try: diff --git a/tests/integration/test_adapters/test_duckdb/test_exceptions.py b/tests/integration/test_adapters/test_duckdb/test_exceptions.py index 6c83f17e0..dde2dadc4 100644 --- 
a/tests/integration/test_adapters/test_duckdb/test_exceptions.py +++ b/tests/integration/test_adapters/test_duckdb/test_exceptions.py @@ -20,7 +20,7 @@ @pytest.fixture def duckdb_exception_session() -> Generator[DuckDBDriver, None, None]: """Create a DuckDB session for exception testing.""" - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) try: with config.provide_session() as session: diff --git a/tests/integration/test_adapters/test_duckdb/test_extensions/test_adk/test_store.py b/tests/integration/test_adapters/test_duckdb/test_extensions/test_adk/test_store.py index 633fb7a48..a685e08b8 100644 --- a/tests/integration/test_adapters/test_duckdb/test_extensions/test_adk/test_store.py +++ b/tests/integration/test_adapters/test_duckdb/test_extensions/test_adk/test_store.py @@ -30,7 +30,7 @@ def duckdb_adk_store(tmp_path: Path, worker_id: str) -> "Generator[DuckdbADKStor db_path = tmp_path / f"test_adk_{worker_id}.duckdb" try: config = DuckDBConfig( - pool_config={"database": str(db_path)}, + connection_config={"database": str(db_path)}, extension_config={"adk": {"session_table": "test_sessions", "events_table": "test_events"}}, ) store = DuckdbADKStore(config) @@ -337,7 +337,7 @@ def test_table_not_found_handling(tmp_path: Path, worker_id: str) -> None: """Test graceful handling when tables don't exist.""" db_path = tmp_path / f"test_no_tables_{worker_id}.duckdb" try: - config = DuckDBConfig(pool_config={"database": str(db_path)}) + config = DuckDBConfig(connection_config={"database": str(db_path)}) store = DuckdbADKStore(config) result = store.get_session("nonexistent") @@ -397,7 +397,7 @@ def test_owner_id_column_with_integer(tmp_path: Path, worker_id: str) -> None: """Test owner ID column with INTEGER type.""" db_path = tmp_path / f"test_owner_id_int_{worker_id}.duckdb" try: - config = DuckDBConfig(pool_config={"database": str(db_path)}) + config = DuckDBConfig(connection_config={"database": str(db_path)}) with config.provide_connection() as conn: conn.execute("CREATE TABLE tenants (id INTEGER PRIMARY KEY, name VARCHAR)") @@ -405,7 +405,7 @@ def test_owner_id_column_with_integer(tmp_path: Path, worker_id: str) -> None: conn.commit() config_with_extension = DuckDBConfig( - pool_config={"database": str(db_path)}, + connection_config={"database": str(db_path)}, extension_config={ "adk": { "session_table": "sessions_with_tenant", @@ -440,7 +440,7 @@ def test_owner_id_column_with_ubigint(tmp_path: Path, worker_id: str) -> None: """Test owner ID column with DuckDB UBIGINT type.""" db_path = tmp_path / f"test_owner_id_ubigint_{worker_id}.duckdb" try: - config = DuckDBConfig(pool_config={"database": str(db_path)}) + config = DuckDBConfig(connection_config={"database": str(db_path)}) with config.provide_connection() as conn: conn.execute("CREATE TABLE users (id UBIGINT PRIMARY KEY, email VARCHAR)") @@ -448,7 +448,7 @@ def test_owner_id_column_with_ubigint(tmp_path: Path, worker_id: str) -> None: conn.commit() config_with_extension = DuckDBConfig( - pool_config={"database": str(db_path)}, + connection_config={"database": str(db_path)}, extension_config={ "adk": { "session_table": "sessions_with_user", @@ -486,7 +486,7 @@ def test_owner_id_column_foreign_key_constraint(tmp_path: Path, worker_id: str) """Test that FK constraint is enforced.""" db_path = tmp_path / f"test_owner_id_constraint_{worker_id}.duckdb" try: - config = DuckDBConfig(pool_config={"database": str(db_path)}) + config = 
DuckDBConfig(connection_config={"database": str(db_path)}) with config.provide_connection() as conn: conn.execute("CREATE TABLE organizations (id INTEGER PRIMARY KEY, name VARCHAR)") @@ -494,7 +494,7 @@ def test_owner_id_column_foreign_key_constraint(tmp_path: Path, worker_id: str) conn.commit() config_with_extension = DuckDBConfig( - pool_config={"database": str(db_path)}, + connection_config={"database": str(db_path)}, extension_config={ "adk": { "session_table": "sessions_with_org", @@ -529,14 +529,14 @@ def test_owner_id_column_without_value(tmp_path: Path, worker_id: str) -> None: """Test creating session without owner_id when column is configured but nullable.""" db_path = tmp_path / f"test_owner_id_nullable_{worker_id}.duckdb" try: - config = DuckDBConfig(pool_config={"database": str(db_path)}) + config = DuckDBConfig(connection_config={"database": str(db_path)}) with config.provide_connection() as conn: conn.execute("CREATE TABLE accounts (id INTEGER PRIMARY KEY, name VARCHAR)") conn.commit() config_with_extension = DuckDBConfig( - pool_config={"database": str(db_path)}, + connection_config={"database": str(db_path)}, extension_config={ "adk": { "session_table": "sessions_nullable_fk", @@ -565,7 +565,7 @@ def test_owner_id_column_with_varchar(tmp_path: Path, worker_id: str) -> None: """Test owner ID column with VARCHAR type.""" db_path = tmp_path / f"test_owner_id_varchar_{worker_id}.duckdb" try: - config = DuckDBConfig(pool_config={"database": str(db_path)}) + config = DuckDBConfig(connection_config={"database": str(db_path)}) with config.provide_connection() as conn: conn.execute("CREATE TABLE companies (code VARCHAR PRIMARY KEY, name VARCHAR)") @@ -573,7 +573,7 @@ def test_owner_id_column_with_varchar(tmp_path: Path, worker_id: str) -> None: conn.commit() config_with_extension = DuckDBConfig( - pool_config={"database": str(db_path)}, + connection_config={"database": str(db_path)}, extension_config={ "adk": { "session_table": "sessions_with_company", @@ -609,7 +609,7 @@ def test_owner_id_column_multiple_sessions(tmp_path: Path, worker_id: str) -> No """Test multiple sessions with same FK value.""" db_path = tmp_path / f"test_owner_id_multiple_{worker_id}.duckdb" try: - config = DuckDBConfig(pool_config={"database": str(db_path)}) + config = DuckDBConfig(connection_config={"database": str(db_path)}) with config.provide_connection() as conn: conn.execute("CREATE TABLE departments (id INTEGER PRIMARY KEY, name VARCHAR)") @@ -617,7 +617,7 @@ def test_owner_id_column_multiple_sessions(tmp_path: Path, worker_id: str) -> No conn.commit() config_with_extension = DuckDBConfig( - pool_config={"database": str(db_path)}, + connection_config={"database": str(db_path)}, extension_config={ "adk": { "session_table": "sessions_with_dept", @@ -652,7 +652,7 @@ def test_owner_id_column_query_by_fk(tmp_path: Path, worker_id: str) -> None: """Test querying sessions by FK column value.""" db_path = tmp_path / f"test_owner_id_query_{worker_id}.duckdb" try: - config = DuckDBConfig(pool_config={"database": str(db_path)}) + config = DuckDBConfig(connection_config={"database": str(db_path)}) with config.provide_connection() as conn: conn.execute("CREATE TABLE projects (id INTEGER PRIMARY KEY, name VARCHAR)") @@ -660,7 +660,7 @@ def test_owner_id_column_query_by_fk(tmp_path: Path, worker_id: str) -> None: conn.commit() config_with_extension = DuckDBConfig( - pool_config={"database": str(db_path)}, + connection_config={"database": str(db_path)}, extension_config={ "adk": { "session_table": 
"sessions_with_project", diff --git a/tests/integration/test_adapters/test_duckdb/test_extensions/test_litestar/test_store.py b/tests/integration/test_adapters/test_duckdb/test_extensions/test_litestar/test_store.py index 47ae2ba77..19cecc299 100644 --- a/tests/integration/test_adapters/test_duckdb/test_extensions/test_litestar/test_store.py +++ b/tests/integration/test_adapters/test_duckdb/test_extensions/test_litestar/test_store.py @@ -33,7 +33,8 @@ async def duckdb_store(tmp_path: Path, worker_id: str) -> AsyncGenerator[DuckdbS db_path = tmp_path / f"test_sessions_{worker_id}.duckdb" try: config = DuckDBConfig( - pool_config={"database": str(db_path)}, extension_config={"litestar": {"session_table": "test_sessions"}} + connection_config={"database": str(db_path)}, + extension_config={"litestar": {"session_table": "test_sessions"}}, ) store = DuckdbStore(config) await store.create_table() diff --git a/tests/integration/test_adapters/test_duckdb/test_migrations.py b/tests/integration/test_adapters/test_duckdb/test_migrations.py index d381816c9..ea368f17c 100644 --- a/tests/integration/test_adapters/test_duckdb/test_migrations.py +++ b/tests/integration/test_adapters/test_duckdb/test_migrations.py @@ -17,7 +17,7 @@ def test_duckdb_migration_full_workflow(tmp_path: Path) -> None: db_path = tmp_path / "test.duckdb" config = DuckDBConfig( - pool_config={"database": str(db_path)}, + connection_config={"database": str(db_path)}, migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, ) commands: SyncMigrationCommands[Any] | AsyncMigrationCommands[Any] = create_migration_commands(config) @@ -76,7 +76,7 @@ def test_duckdb_multiple_migrations_workflow(tmp_path: Path) -> None: db_path = tmp_path / "test.duckdb" config = DuckDBConfig( - pool_config={"database": str(db_path)}, + connection_config={"database": str(db_path)}, migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, ) commands: SyncMigrationCommands[Any] | AsyncMigrationCommands[Any] = create_migration_commands(config) @@ -170,7 +170,7 @@ def test_duckdb_migration_current_command(tmp_path: Path) -> None: db_path = tmp_path / "test.duckdb" config = DuckDBConfig( - pool_config={"database": str(db_path)}, + connection_config={"database": str(db_path)}, migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, ) commands: SyncMigrationCommands[Any] | AsyncMigrationCommands[Any] = create_migration_commands(config) @@ -205,7 +205,7 @@ def test_duckdb_migration_error_handling(tmp_path: Path) -> None: db_path = tmp_path / "test.duckdb" config = DuckDBConfig( - pool_config={"database": str(db_path)}, + connection_config={"database": str(db_path)}, migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, ) commands: SyncMigrationCommands[Any] | AsyncMigrationCommands[Any] = create_migration_commands(config) @@ -240,7 +240,7 @@ def test_duckdb_migration_with_transactions(tmp_path: Path) -> None: db_path = tmp_path / "test.duckdb" config = DuckDBConfig( - pool_config={"database": str(db_path)}, + connection_config={"database": str(db_path)}, migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, ) commands: SyncMigrationCommands[Any] | AsyncMigrationCommands[Any] = create_migration_commands(config) diff --git a/tests/integration/test_adapters/test_duckdb/test_mixed_parameter_styles.py 
b/tests/integration/test_adapters/test_duckdb/test_mixed_parameter_styles.py
index 9fdcf1a7f..2fe6331e3 100644
--- a/tests/integration/test_adapters/test_duckdb/test_mixed_parameter_styles.py
+++ b/tests/integration/test_adapters/test_duckdb/test_mixed_parameter_styles.py
@@ -18,7 +18,7 @@ def duckdb_test_setup() -> Generator[tuple[DuckDBDriver, str], None, None]:
     Returns:
         A tuple of (session, table_name)
     """
-    config = DuckDBConfig(pool_config={"database": ":memory:shared_db"})
+    config = DuckDBConfig(connection_config={"database": ":memory:shared_db"})

     table_name = get_unique_table_name("test_table")

diff --git a/tests/integration/test_adapters/test_duckdb/test_parameter_styles.py b/tests/integration/test_adapters/test_duckdb/test_parameter_styles.py
index bff8a6cd3..455f744a2 100644
--- a/tests/integration/test_adapters/test_duckdb/test_parameter_styles.py
+++ b/tests/integration/test_adapters/test_duckdb/test_parameter_styles.py
@@ -20,7 +20,7 @@ def duckdb_parameters_session() -> "Generator[DuckDBDriver, None, None]":
     import uuid

     # Use unique database for each test to avoid data contamination
-    config = DuckDBConfig(pool_config={"database": f":memory:{uuid.uuid4().hex}"})
+    config = DuckDBConfig(connection_config={"database": f":memory:{uuid.uuid4().hex}"})

     with config.provide_session() as session:
         session.execute_script("""
@@ -498,7 +498,7 @@ def test_duckdb_parameter_performance(duckdb_parameters_session: DuckDBDriver) -
 def test_duckdb_none_parameters() -> None:
     """Test that None values in named parameters are handled correctly by DuckDB."""
-    config = DuckDBConfig(pool_config={"database": ":memory:shared_db_none_test"})
+    config = DuckDBConfig(connection_config={"database": ":memory:shared_db_none_test"})

     with config.provide_session() as driver:
         # Create test table
@@ -565,7 +565,7 @@ def test_duckdb_none_parameters() -> None:
 def test_duckdb_none_parameters_qmark_style() -> None:
     """Test None values with QMARK (?) parameter style - DuckDB default."""
-    config = DuckDBConfig(pool_config={"database": ":memory:shared_db_qmark_test"})
+    config = DuckDBConfig(connection_config={"database": ":memory:shared_db_qmark_test"})

     with config.provide_session() as driver:
         # Create test table without primary key constraint to allow None insertion test
@@ -597,7 +597,7 @@ def test_duckdb_none_parameters_qmark_style() -> None:
 def test_duckdb_none_parameters_numeric_style() -> None:
     """Test None values with NUMERIC ($1, $2) parameter style."""
-    config = DuckDBConfig(pool_config={"database": ":memory:shared_db_numeric_test"})
+    config = DuckDBConfig(connection_config={"database": ":memory:shared_db_numeric_test"})

     with config.provide_session() as driver:
         # Create test table without primary key constraint
@@ -629,7 +629,7 @@ def test_duckdb_none_parameters_numeric_style() -> None:
 def test_duckdb_all_none_parameters() -> None:
     """Test when all parameter values are None."""
-    config = DuckDBConfig(pool_config={"database": ":memory:shared_db_all_none_test"})
+    config = DuckDBConfig(connection_config={"database": ":memory:shared_db_all_none_test"})

     with config.provide_session() as driver:
         # Create test table with auto-increment ID
@@ -669,7 +669,7 @@ def test_duckdb_all_none_parameters() -> None:
 def test_duckdb_none_with_execute_many() -> None:
     """Test None values work correctly with execute_many."""
-    config = DuckDBConfig(pool_config={"database": ":memory:shared_db_many_test"})
+    config = DuckDBConfig(connection_config={"database": ":memory:shared_db_many_test"})

     with config.provide_session() as driver:
         # Create test table
@@ -710,7 +710,7 @@ def test_duckdb_none_with_execute_many() -> None:
 def test_duckdb_none_in_where_clause() -> None:
     """Test None values in WHERE clauses work correctly."""
-    config = DuckDBConfig(pool_config={"database": ":memory:shared_db_where_test"})
+    config = DuckDBConfig(connection_config={"database": ":memory:shared_db_where_test"})

     with config.provide_session() as driver:
         # Create test table
@@ -748,7 +748,7 @@ def test_duckdb_none_in_where_clause() -> None:
 def test_duckdb_none_complex_parameter_scenarios() -> None:
     """Test complex scenarios with None parameters that might cause issues."""
-    config = DuckDBConfig(pool_config={"database": ":memory:shared_db_complex_test"})
+    config = DuckDBConfig(connection_config={"database": ":memory:shared_db_complex_test"})

     with config.provide_session() as driver:
         # Create test table
@@ -809,7 +809,7 @@ def test_duckdb_none_complex_parameter_scenarios() -> None:
 def test_duckdb_none_parameter_edge_cases() -> None:
     """Test edge cases that might reveal parameter handling bugs."""
-    config = DuckDBConfig(pool_config={"database": ":memory:shared_db_edge_test"})
+    config = DuckDBConfig(connection_config={"database": ":memory:shared_db_edge_test"})

     with config.provide_session() as driver:
         # Test 1: Empty parameter list with None
@@ -857,7 +857,7 @@ def test_duckdb_parameter_count_mismatch_with_none() -> None:
     This test verifies the bug mentioned in the original issue where
     parameter count mismatches might be missed when None values are present.
     """
-    config = DuckDBConfig(pool_config={"database": ":memory:shared_db_param_count_test"})
+    config = DuckDBConfig(connection_config={"database": ":memory:shared_db_param_count_test"})

     with config.provide_session() as driver:
         driver.execute("CREATE TABLE test_param_count (col1 VARCHAR, col2 INTEGER)")
diff --git a/tests/integration/test_adapters/test_duckdb/test_pooling.py b/tests/integration/test_adapters/test_duckdb/test_pooling.py
index ecc027f5e..6578d48c6 100644
--- a/tests/integration/test_adapters/test_duckdb/test_pooling.py
+++ b/tests/integration/test_adapters/test_duckdb/test_pooling.py
@@ -13,10 +13,10 @@
 def test_shared_memory_pooling() -> None:
     """Test that shared memory databases allow pooling."""
-    config = DuckDBConfig(pool_config={"database": ":memory:shared_test", "pool_min_size": 2, "pool_max_size": 5})
+    config = DuckDBConfig(connection_config={"database": ":memory:shared_test", "pool_min_size": 2, "pool_max_size": 5})

-    assert config.pool_config["pool_min_size"] == 2
-    assert config.pool_config["pool_max_size"] == 5
+    assert config.connection_config["pool_min_size"] == 2
+    assert config.connection_config["pool_max_size"] == 5

     with config.provide_session() as session1:
         session1.execute("DROP TABLE IF EXISTS shared_test")
@@ -37,12 +37,12 @@ def test_shared_memory_pooling() -> None:
 def test_regular_memory_auto_conversion() -> None:
     """Test that regular memory databases are auto-converted to shared memory with pooling enabled."""
-    config = DuckDBConfig(pool_config={"database": ":memory:", "pool_min_size": 5, "pool_max_size": 10})
+    config = DuckDBConfig(connection_config={"database": ":memory:", "pool_min_size": 5, "pool_max_size": 10})

-    assert config.pool_config["pool_min_size"] == 5
-    assert config.pool_config["pool_max_size"] == 10
+    assert config.connection_config["pool_min_size"] == 5
+    assert config.connection_config["pool_max_size"] == 10

-    database = config.pool_config["database"]
+    database = config.connection_config["database"]
     assert database == ":memory:shared_db"

     with config.provide_session() as session1:
@@ -67,10 +67,10 @@ def test_file_database_pooling() -> None:
     with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp_file:
         db_path = tmp_file.name

-        config = DuckDBConfig(pool_config={"database": db_path, "pool_min_size": 2, "pool_max_size": 4})
+        config = DuckDBConfig(connection_config={"database": db_path, "pool_min_size": 2, "pool_max_size": 4})

-        assert config.pool_config["pool_min_size"] == 2
-        assert config.pool_config["pool_max_size"] == 4
+        assert config.connection_config["pool_min_size"] == 2
+        assert config.connection_config["pool_max_size"] == 4

         with config.provide_session() as session1:
             session1.execute("CREATE TABLE file_test (id INTEGER, data TEXT)")
@@ -91,7 +91,7 @@ def test_file_database_pooling() -> None:
 def test_connection_pool_health_checks() -> None:
     """Test that the connection pool performs health checks correctly."""
-    config = DuckDBConfig(pool_config={"database": ":memory:health_test", "pool_min_size": 1, "pool_max_size": 3})
+    config = DuckDBConfig(connection_config={"database": ":memory:health_test", "pool_min_size": 1, "pool_max_size": 3})

     pool = config.provide_pool()
     with pool.get_connection() as conn:
@@ -104,9 +104,9 @@
 def test_empty_database_conversion() -> None:
     """Test that empty database string gets converted properly."""
-    config = DuckDBConfig(pool_config={"database": ""})
+    config = DuckDBConfig(connection_config={"database": ""})

-    database = config.pool_config["database"]
+    database = config.connection_config["database"]
     assert database.startswith(":memory:")
     assert len(database) == len(":memory:shared_db")
@@ -119,7 +119,7 @@ def test_default_config_conversion() -> None:
     """Test that default config (no connection_config) works with shared memory."""
     config = DuckDBConfig()

-    database = config.pool_config["database"]
+    database = config.connection_config["database"]
     assert database.startswith(":memory:shared_db")
     assert len(database) == len(":memory:shared_db")
diff --git a/tests/integration/test_adapters/test_oracledb/conftest.py b/tests/integration/test_adapters/test_oracledb/conftest.py
index 2403e3689..ed449d665 100644
--- a/tests/integration/test_adapters/test_oracledb/conftest.py
+++ b/tests/integration/test_adapters/test_oracledb/conftest.py
@@ -10,7 +10,7 @@

 @pytest.fixture
-def oracle_pool_config(oracle_23ai_service: OracleService) -> "dict[str, Any]":
+def oracle_connection_config(oracle_23ai_service: OracleService) -> "dict[str, Any]":
     """Shared Oracle pool configuration."""
     return {
@@ -23,20 +23,20 @@ def oracle_pool_config(oracle_23ai_service: OracleService) -> "dict[str, Any]":

 @pytest.fixture
-def oracle_sync_config(oracle_pool_config: "dict[str, Any]") -> OracleSyncConfig:
+def oracle_sync_config(oracle_connection_config: "dict[str, Any]") -> OracleSyncConfig:
     """Create Oracle sync configuration."""
-    return OracleSyncConfig(pool_config=dict(oracle_pool_config))
+    return OracleSyncConfig(connection_config=dict(oracle_connection_config))


 @pytest.fixture
-async def oracle_async_config(oracle_pool_config: "dict[str, Any]") -> "AsyncGenerator[OracleAsyncConfig, None]":
+async def oracle_async_config(oracle_connection_config: "dict[str, Any]") -> "AsyncGenerator[OracleAsyncConfig, None]":
     """Create Oracle async configuration."""
-    pool_config = dict(oracle_pool_config)
-    pool_config.setdefault("min", 1)
-    pool_config.setdefault("max", 5)
-    config = OracleAsyncConfig(pool_config=pool_config)
+    connection_config = dict(oracle_connection_config)
+    connection_config.setdefault("min", 1)
+    connection_config.setdefault("max", 5)
+    config = OracleAsyncConfig(connection_config=connection_config)
     try:
         yield config
     finally:
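Note the rename cascades through fixture names here (`oracle_pool_config` becomes `oracle_connection_config`), not just keyword arguments. A hypothetical standalone version of the same pattern, with placeholder values standing in for the `oracle_23ai_service` fixture (import path assumed):

```python
# Sketch of the renamed fixture pattern; host/port/service_name are placeholders.
from sqlspec.adapters.oracledb import OracleAsyncConfig, OracleSyncConfig  # path assumed

connection_config = {
    "host": "localhost",         # oracle_23ai_service.host in the real fixture
    "port": 1521,                # oracle_23ai_service.port
    "service_name": "FREEPDB1",  # oracle_23ai_service.service_name
}

sync_config = OracleSyncConfig(connection_config=dict(connection_config))

# The async fixture seeds pool bounds into the same dict before building the config.
async_kwargs = dict(connection_config)
async_kwargs.setdefault("min", 1)
async_kwargs.setdefault("max", 5)
async_config = OracleAsyncConfig(connection_config=async_kwargs)
```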
diff --git a/tests/integration/test_adapters/test_oracledb/test_connection.py b/tests/integration/test_adapters/test_oracledb/test_connection.py
index 5dd2a9d24..1406314c2 100644
--- a/tests/integration/test_adapters/test_oracledb/test_connection.py
+++ b/tests/integration/test_adapters/test_oracledb/test_connection.py
@@ -11,7 +11,7 @@ async def test_async_connection(oracle_23ai_service: OracleService) -> None:
     """Test async connection components for OracleDB."""
     async_config = OracleAsyncConfig(
-        pool_config={
+        connection_config={
             "host": oracle_23ai_service.host,
             "port": oracle_23ai_service.port,
             "service_name": oracle_23ai_service.service_name,
@@ -35,7 +35,7 @@ async def test_async_connection(oracle_23ai_service: OracleService) -> None:
     # Test pool with connection parameters
     another_config = OracleAsyncConfig(
-        pool_config={
+        connection_config={
             "host": oracle_23ai_service.host,
             "port": oracle_23ai_service.port,
             "service_name": oracle_23ai_service.service_name,
@@ -61,7 +61,7 @@ def test_sync_connection(oracle_23ai_service: OracleService) -> None:
     """Test sync connection components for OracleDB."""
     sync_config = OracleSyncConfig(
-        pool_config={
+        connection_config={
             "host": oracle_23ai_service.host,
             "port": oracle_23ai_service.port,
             "service_name": oracle_23ai_service.service_name,
@@ -85,7 +85,7 @@ def test_sync_connection(oracle_23ai_service: OracleService) -> None:
     # Test pool with connection parameters
     another_config = OracleSyncConfig(
-        pool_config={
+        connection_config={
             "host": oracle_23ai_service.host,
             "port": oracle_23ai_service.port,
             "service_name": oracle_23ai_service.service_name,
diff --git a/tests/integration/test_adapters/test_oracledb/test_driver_async.py b/tests/integration/test_adapters/test_oracledb/test_driver_async.py
index e69de5761..61d1a53db 100644
--- a/tests/integration/test_adapters/test_oracledb/test_driver_async.py
+++ b/tests/integration/test_adapters/test_oracledb/test_driver_async.py
@@ -455,7 +455,8 @@ class Product(msgspec.Struct):
 async def test_async_uppercase_columns_when_disabled(oracle_async_config: OracleAsyncConfig) -> None:
     """Ensure disabling lowercase feature preserves uppercase columns."""
     custom_config = OracleAsyncConfig(
-        pool_config=dict(oracle_async_config.pool_config), driver_features={"enable_lowercase_column_names": False}
+        connection_config=dict(oracle_async_config.connection_config),
+        driver_features={"enable_lowercase_column_names": False},
     )

     async with custom_config.provide_session() as session:
diff --git a/tests/integration/test_adapters/test_oracledb/test_driver_sync.py b/tests/integration/test_adapters/test_oracledb/test_driver_sync.py
index 4b85c37be..e85592e53 100644
--- a/tests/integration/test_adapters/test_oracledb/test_driver_sync.py
+++ b/tests/integration/test_adapters/test_oracledb/test_driver_sync.py
@@ -449,7 +449,8 @@ class Product(msgspec.Struct):
 def test_sync_uppercase_columns_when_disabled(oracle_sync_config: OracleSyncConfig) -> None:
     """Ensure disabling lowercase feature preserves uppercase columns."""
     custom_config = OracleSyncConfig(
-        pool_config=dict(oracle_sync_config.pool_config), driver_features={"enable_lowercase_column_names": False}
+        connection_config=dict(oracle_sync_config.connection_config),
+        driver_features={"enable_lowercase_column_names": False},
     )

     with custom_config.provide_session() as session:
diff --git a/tests/integration/test_adapters/test_oracledb/test_exceptions.py b/tests/integration/test_adapters/test_oracledb/test_exceptions.py
index 0a028299d..553abb018 100644
--- a/tests/integration/test_adapters/test_oracledb/test_exceptions.py
+++ b/tests/integration/test_adapters/test_oracledb/test_exceptions.py
@@ -15,7 +15,7 @@
 def oracle_sync_exception_session(oracle_service: OracleService) -> Generator[OracleSyncDriver, None, None]:
     """Create an Oracle sync session for exception testing."""
     config = OracleSyncConfig(
-        pool_config={
+        connection_config={
             "user": oracle_service.user,
             "password": oracle_service.password,
             "dsn": f"{oracle_service.host}:{oracle_service.port}/{oracle_service.service_name}",
@@ -33,7 +33,7 @@
 async def oracle_async_exception_session(oracle_service: OracleService) -> AsyncGenerator[OracleAsyncDriver, None]:
     """Create an Oracle async session for exception testing."""
     config = OracleAsyncConfig(
-        pool_config={
+        connection_config={
             "user": oracle_service.user,
             "password": oracle_service.password,
             "dsn": f"{oracle_service.host}:{oracle_service.port}/{oracle_service.service_name}",
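The driver tests above show the idiom for deriving a variant config: copy the fixture's `connection_config` dict and override `driver_features`. A minimal sketch of that idiom (import path and placeholder values assumed):

```python
# Sketch: reuse connection settings, flip one driver feature, as the
# uppercase-column tests above do. Copy the dict rather than sharing it.
from sqlspec.adapters.oracledb import OracleSyncConfig  # path assumed

base = OracleSyncConfig(
    connection_config={"host": "localhost", "port": 1521, "service_name": "FREEPDB1"}
)

custom = OracleSyncConfig(
    connection_config=dict(base.connection_config),  # independent copy
    driver_features={"enable_lowercase_column_names": False},
)
```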
diff --git a/tests/integration/test_adapters/test_oracledb/test_extensions/test_adk/test_inmemory.py b/tests/integration/test_adapters/test_oracledb/test_extensions/test_adk/test_inmemory.py
index cc15fc142..26d86165f 100644
--- a/tests/integration/test_adapters/test_oracledb/test_extensions/test_adk/test_inmemory.py
+++ b/tests/integration/test_adapters/test_oracledb/test_extensions/test_adk/test_inmemory.py
@@ -22,7 +22,7 @@ async def test_inmemory_enabled_creates_sessions_table_with_inmemory_async(
 ) -> None:
     """Test that in_memory=True creates sessions table with INMEMORY clause."""
     config = OracleAsyncConfig(
-        pool_config=oracle_async_config.pool_config, extension_config={"adk": {"in_memory": True}}
+        connection_config=oracle_async_config.connection_config, extension_config={"adk": {"in_memory": True}}
     )
     store = OracleAsyncADKStore(config)
@@ -64,7 +64,7 @@ async def test_inmemory_enabled_creates_events_table_with_inmemory_async(
 ) -> None:
     """Test that in_memory=True creates events table with INMEMORY clause."""
     config = OracleAsyncConfig(
-        pool_config=oracle_async_config.pool_config, extension_config={"adk": {"in_memory": True}}
+        connection_config=oracle_async_config.connection_config, extension_config={"adk": {"in_memory": True}}
     )
     store = OracleAsyncADKStore(config)
@@ -102,7 +102,7 @@ async def test_inmemory_enabled_creates_events_table_with_inmemory_async(
 async def test_inmemory_disabled_creates_tables_without_inmemory_async(oracle_async_config: OracleAsyncConfig) -> None:
     """Test that in_memory=False (default) creates tables without INMEMORY clause."""
     config = OracleAsyncConfig(
-        pool_config=oracle_async_config.pool_config, extension_config={"adk": {"in_memory": False}}
+        connection_config=oracle_async_config.connection_config, extension_config={"adk": {"in_memory": False}}
     )
     store = OracleAsyncADKStore(config)
@@ -141,7 +141,7 @@
 @pytest.mark.oracledb
 async def test_inmemory_default_disabled_async(oracle_async_config: OracleAsyncConfig) -> None:
     """Test that in_memory defaults to False when not specified."""
-    config = OracleAsyncConfig(pool_config=oracle_async_config.pool_config, extension_config={"adk": {}})
+    config = OracleAsyncConfig(connection_config=oracle_async_config.connection_config, extension_config={"adk": {}})
     store = OracleAsyncADKStore(config)

     await store.create_tables()
@@ -198,7 +198,7 @@ async def test_inmemory_with_owner_id_column_async(oracle_async_config: OracleAsyncConfig) -> None:
     try:
         config = OracleAsyncConfig(
-            pool_config=oracle_async_config.pool_config,
+            connection_config=oracle_async_config.connection_config,
             extension_config={
                 "adk": {"in_memory": True, "owner_id_column": "owner_id NUMBER(10) NOT NULL REFERENCES test_owners(id)"}
             },
@@ -263,7 +263,7 @@ async def test_inmemory_with_owner_id_column_async(oracle_async_config: OracleAsyncConfig) -> None:
 async def test_inmemory_tables_functional_async(oracle_async_config: OracleAsyncConfig) -> None:
     """Test that INMEMORY tables work correctly for session operations."""
     config = OracleAsyncConfig(
-        pool_config=oracle_async_config.pool_config, extension_config={"adk": {"in_memory": True}}
+        connection_config=oracle_async_config.connection_config, extension_config={"adk": {"in_memory": True}}
     )
     store = OracleAsyncADKStore(config)
@@ -304,7 +304,9 @@
 @pytest.mark.oracledb
 def test_inmemory_enabled_sync(oracle_sync_config: OracleSyncConfig) -> None:
     """Test that in_memory=True works with sync store."""
-    config = OracleSyncConfig(pool_config=oracle_sync_config.pool_config, extension_config={"adk": {"in_memory": True}})
+    config = OracleSyncConfig(
+        connection_config=oracle_sync_config.connection_config, extension_config={"adk": {"in_memory": True}}
+    )
     store = OracleSyncADKStore(config)

     store.create_tables()
@@ -344,7 +346,7 @@ def test_inmemory_enabled_sync(oracle_sync_config: OracleSyncConfig) -> None:
 def test_inmemory_disabled_sync(oracle_sync_config: OracleSyncConfig) -> None:
     """Test that in_memory=False works with sync store."""
     config = OracleSyncConfig(
-        pool_config=oracle_sync_config.pool_config, extension_config={"adk": {"in_memory": False}}
+        connection_config=oracle_sync_config.connection_config, extension_config={"adk": {"in_memory": False}}
     )
     store = OracleSyncADKStore(config)
@@ -382,7 +384,9 @@ def test_inmemory_disabled_sync(oracle_sync_config: OracleSyncConfig) -> None:
 @pytest.mark.oracledb
 def test_inmemory_tables_functional_sync(oracle_sync_config: OracleSyncConfig) -> None:
     """Test that INMEMORY tables work correctly in sync mode."""
-    config = OracleSyncConfig(pool_config=oracle_sync_config.pool_config, extension_config={"adk": {"in_memory": True}})
+    config = OracleSyncConfig(
+        connection_config=oracle_sync_config.connection_config, extension_config={"adk": {"in_memory": True}}
+    )
     store = OracleSyncADKStore(config)

     store.create_tables()
diff --git a/tests/integration/test_adapters/test_oracledb/test_extensions/test_adk/test_oracle_specific.py b/tests/integration/test_adapters/test_oracledb/test_extensions/test_adk/test_oracle_specific.py
index 0d8197dc3..2c22fa0ca 100644
--- a/tests/integration/test_adapters/test_oracledb/test_extensions/test_adk/test_oracle_specific.py
+++ b/tests/integration/test_adapters/test_oracledb/test_extensions/test_adk/test_oracle_specific.py
@@ -326,7 +326,7 @@ async def oracle_store_with_fk(self, oracle_config_with_tenant_table: Any) -> Any:
         """Create async Oracle ADK store with owner_id_column."""
         base_config = oracle_config_with_tenant_table
         config_with_extension = OracleAsyncConfig(
-            pool_config=base_config.pool_config,
+            connection_config=base_config.connection_config,
             extension_config={"adk": {"owner_id_column": "tenant_id NUMBER(10) NOT NULL REFERENCES tenants(id)"}},
         )
         store = OracleAsyncADKStore(config_with_extension)
@@ -381,7 +381,7 @@ async def test_create_session_without_owner_id_when_required(self, oracle_store_with_fk: Any) -> None:
     async def test_fk_column_name_parsing(self, oracle_async_config: OracleAsyncConfig) -> None:
         """Test _owner_id_column_name is correctly parsed from DDL."""
         config_with_extension = OracleAsyncConfig(
-            pool_config=oracle_async_config.pool_config,
+            connection_config=oracle_async_config.connection_config,
             extension_config={"adk": {"owner_id_column": "account_id NUMBER(19) REFERENCES accounts(id)"}},
         )
         store = OracleAsyncADKStore(config_with_extension)
@@ -389,7 +389,7 @@
         assert store.owner_id_column_ddl == "account_id NUMBER(19) REFERENCES accounts(id)"

         config_with_extension2 = OracleAsyncConfig(
-            pool_config=oracle_async_config.pool_config,
+            connection_config=oracle_async_config.connection_config,
             extension_config={"adk": {"owner_id_column": "org_uuid RAW(16) REFERENCES organizations(id)"}},
         )
         store2 = OracleAsyncADKStore(config_with_extension2)
@@ -498,7 +498,7 @@ def oracle_store_sync_with_fk(self, oracle_config_with_users_table: Any) -> Any:
         """Create sync Oracle ADK store with owner_id_column."""
         base_config = oracle_config_with_users_table
         config_with_extension = OracleSyncConfig(
-            pool_config=base_config.pool_config,
+            connection_config=base_config.connection_config,
             extension_config={"adk": {"owner_id_column": "owner_id NUMBER(19) REFERENCES users(id) ON DELETE CASCADE"}},
         )
         store = OracleSyncADKStore(config_with_extension)
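Across these ADK tests the shape is constant: connection settings are reused from an existing config while per-store options (INMEMORY tables, an optional owner-id FK column) ride along in `extension_config`. A minimal sketch, with the import path and placeholder values assumed; the store import is elided because this diff does not show it:

```python
# Sketch of the ADK wiring exercised above; only config construction is shown.
from sqlspec.adapters.oracledb import OracleSyncConfig  # path assumed

base = OracleSyncConfig(
    connection_config={"host": "localhost", "port": 1521, "service_name": "FREEPDB1"}
)

config = OracleSyncConfig(
    connection_config=base.connection_config,
    extension_config={"adk": {"in_memory": True}},
)
# The tests then build a store from this config and create its tables:
# store = OracleSyncADKStore(config); store.create_tables()
```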
extension_config={"adk": {"owner_id_column": "owner_id NUMBER(19) REFERENCES users(id) ON DELETE CASCADE"}}, ) store = OracleSyncADKStore(config_with_extension) diff --git a/tests/integration/test_adapters/test_oracledb/test_extensions/test_litestar/test_inmemory.py b/tests/integration/test_adapters/test_oracledb/test_extensions/test_litestar/test_inmemory.py index 99b858eac..28d62ad35 100644 --- a/tests/integration/test_adapters/test_oracledb/test_extensions/test_litestar/test_inmemory.py +++ b/tests/integration/test_adapters/test_oracledb/test_extensions/test_litestar/test_inmemory.py @@ -21,7 +21,7 @@ async def test_inmemory_enabled_creates_session_table_with_inmemory_async( ) -> None: """Test that in_memory=True creates session table with INMEMORY PRIORITY HIGH clause.""" config = OracleAsyncConfig( - pool_config=oracle_async_config.pool_config, extension_config={"litestar": {"in_memory": True}} + connection_config=oracle_async_config.connection_config, extension_config={"litestar": {"in_memory": True}} ) store = OracleAsyncStore(config) @@ -61,7 +61,7 @@ async def test_inmemory_enabled_creates_session_table_with_inmemory_async( async def test_inmemory_disabled_creates_table_without_inmemory_async(oracle_async_config: OracleAsyncConfig) -> None: """Test that in_memory=False (default) creates table without INMEMORY clause.""" config = OracleAsyncConfig( - pool_config=oracle_async_config.pool_config, extension_config={"litestar": {"in_memory": False}} + connection_config=oracle_async_config.connection_config, extension_config={"litestar": {"in_memory": False}} ) store = OracleAsyncStore(config) @@ -97,7 +97,9 @@ async def test_inmemory_disabled_creates_table_without_inmemory_async(oracle_asy @pytest.mark.oracledb async def test_inmemory_default_disabled_async(oracle_async_config: OracleAsyncConfig) -> None: """Test that in_memory defaults to False when not specified.""" - config = OracleAsyncConfig(pool_config=oracle_async_config.pool_config, extension_config={"litestar": {}}) + config = OracleAsyncConfig( + connection_config=oracle_async_config.connection_config, extension_config={"litestar": {}} + ) store = OracleAsyncStore(config) await store.create_table() @@ -133,7 +135,7 @@ async def test_inmemory_default_disabled_async(oracle_async_config: OracleAsyncC async def test_inmemory_table_functional_async(oracle_async_config: OracleAsyncConfig) -> None: """Test that INMEMORY table works correctly for session operations.""" config = OracleAsyncConfig( - pool_config=oracle_async_config.pool_config, extension_config={"litestar": {"in_memory": True}} + connection_config=oracle_async_config.connection_config, extension_config={"litestar": {"in_memory": True}} ) store = OracleAsyncStore(config) @@ -172,7 +174,7 @@ async def test_inmemory_table_functional_async(oracle_async_config: OracleAsyncC def test_inmemory_enabled_sync(oracle_sync_config: OracleSyncConfig) -> None: """Test that in_memory=True works with sync store.""" config = OracleSyncConfig( - pool_config=oracle_sync_config.pool_config, extension_config={"litestar": {"in_memory": True}} + connection_config=oracle_sync_config.connection_config, extension_config={"litestar": {"in_memory": True}} ) store = OracleSyncStore(config) @@ -212,7 +214,7 @@ def test_inmemory_enabled_sync(oracle_sync_config: OracleSyncConfig) -> None: def test_inmemory_disabled_sync(oracle_sync_config: OracleSyncConfig) -> None: """Test that in_memory=False works with sync store.""" config = OracleSyncConfig( - pool_config=oracle_sync_config.pool_config, 
extension_config={"litestar": {"in_memory": False}} + connection_config=oracle_sync_config.connection_config, extension_config={"litestar": {"in_memory": False}} ) store = OracleSyncStore(config) @@ -251,7 +253,7 @@ def test_inmemory_disabled_sync(oracle_sync_config: OracleSyncConfig) -> None: def test_inmemory_table_functional_sync(oracle_sync_config: OracleSyncConfig) -> None: """Test that INMEMORY table works correctly in sync mode.""" config = OracleSyncConfig( - pool_config=oracle_sync_config.pool_config, extension_config={"litestar": {"in_memory": True}} + connection_config=oracle_sync_config.connection_config, extension_config={"litestar": {"in_memory": True}} ) store = OracleSyncStore(config) diff --git a/tests/integration/test_adapters/test_oracledb/test_extensions/test_litestar/test_store_async.py b/tests/integration/test_adapters/test_oracledb/test_extensions/test_litestar/test_store_async.py index 6282464ec..f73c8b15e 100644 --- a/tests/integration/test_adapters/test_oracledb/test_extensions/test_litestar/test_store_async.py +++ b/tests/integration/test_adapters/test_oracledb/test_extensions/test_litestar/test_store_async.py @@ -17,7 +17,7 @@ async def oracle_store(oracle_23ai_service: OracleService) -> "AsyncGenerator[OracleAsyncStore, None]": """Create Oracle store with test database.""" config = OracleAsyncConfig( - pool_config={ + connection_config={ "host": oracle_23ai_service.host, "port": oracle_23ai_service.port, "service_name": oracle_23ai_service.service_name, @@ -34,7 +34,7 @@ async def oracle_store(oracle_23ai_service: OracleService) -> "AsyncGenerator[Or yield store await store.delete_all() finally: - if config.pool_instance: + if config.connection_instance: await config.close_pool() diff --git a/tests/integration/test_adapters/test_oracledb/test_extensions/test_litestar/test_store_sync.py b/tests/integration/test_adapters/test_oracledb/test_extensions/test_litestar/test_store_sync.py index 45a3cb7ea..87c4aa929 100644 --- a/tests/integration/test_adapters/test_oracledb/test_extensions/test_litestar/test_store_sync.py +++ b/tests/integration/test_adapters/test_oracledb/test_extensions/test_litestar/test_store_sync.py @@ -17,7 +17,7 @@ async def oracle_sync_store(oracle_23ai_service: OracleService) -> AsyncGenerator[OracleSyncStore, None]: """Create Oracle sync store with test database.""" config = OracleSyncConfig( - pool_config={ + connection_config={ "host": oracle_23ai_service.host, "port": oracle_23ai_service.port, "service_name": oracle_23ai_service.service_name, @@ -34,7 +34,7 @@ async def oracle_sync_store(oracle_23ai_service: OracleService) -> AsyncGenerato yield store await store.delete_all() finally: - if config.pool_instance: + if config.connection_instance: config.close_pool() diff --git a/tests/integration/test_adapters/test_oracledb/test_merge.py b/tests/integration/test_adapters/test_oracledb/test_merge.py index 31ceebf36..1efc9f896 100644 --- a/tests/integration/test_adapters/test_oracledb/test_merge.py +++ b/tests/integration/test_adapters/test_oracledb/test_merge.py @@ -16,7 +16,7 @@ async def oracle_merge_async_session(oracle_23ai_service: OracleService) -> AsyncGenerator[OracleAsyncDriver, None]: """Create Oracle async session with test table for MERGE tests.""" config = OracleAsyncConfig( - pool_config={ + connection_config={ "host": oracle_23ai_service.host, "port": oracle_23ai_service.port, "service_name": oracle_23ai_service.service_name, @@ -72,7 +72,7 @@ async def oracle_merge_async_session(oracle_23ai_service: OracleService) -> Asyn def 
diff --git a/tests/integration/test_adapters/test_oracledb/test_merge.py b/tests/integration/test_adapters/test_oracledb/test_merge.py
index 31ceebf36..1efc9f896 100644
--- a/tests/integration/test_adapters/test_oracledb/test_merge.py
+++ b/tests/integration/test_adapters/test_oracledb/test_merge.py
@@ -16,7 +16,7 @@ async def oracle_merge_async_session(oracle_23ai_service: OracleService) -> AsyncGenerator[OracleAsyncDriver, None]:
     """Create Oracle async session with test table for MERGE tests."""
     config = OracleAsyncConfig(
-        pool_config={
+        connection_config={
             "host": oracle_23ai_service.host,
             "port": oracle_23ai_service.port,
             "service_name": oracle_23ai_service.service_name,
@@ -72,7 +72,7 @@ def oracle_merge_sync_session(oracle_23ai_service: OracleService) -> Generator[OracleSyncDriver, None, None]:
     """Create Oracle sync session with test table for MERGE tests."""
     config = OracleSyncConfig(
-        pool_config={
+        connection_config={
             "host": oracle_23ai_service.host,
             "port": oracle_23ai_service.port,
             "service_name": oracle_23ai_service.service_name,
diff --git a/tests/integration/test_adapters/test_oracledb/test_migrations.py b/tests/integration/test_adapters/test_oracledb/test_migrations.py
index 185900c96..8335ec3fe 100644
--- a/tests/integration/test_adapters/test_oracledb/test_migrations.py
+++ b/tests/integration/test_adapters/test_oracledb/test_migrations.py
@@ -22,7 +22,7 @@ def test_oracledb_sync_migration_full_workflow(tmp_path: Path, oracle_23ai_service: OracleService) -> None:
     migration_dir = tmp_path / "migrations"

     config = OracleSyncConfig(
-        pool_config={
+        connection_config={
             "host": oracle_23ai_service.host,
             "port": oracle_23ai_service.port,
             "service_name": oracle_23ai_service.service_name,
@@ -81,7 +81,7 @@ def down():
             result = driver.execute(f"SELECT table_name FROM user_tables WHERE table_name = '{users_table.upper()}'")
             assert len(result.data) == 0
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             config.close_pool()
@@ -95,7 +95,7 @@ async def test_oracledb_async_migration_full_workflow(tmp_path: Path, oracle_23ai_service: OracleService) -> None:
     migration_dir = tmp_path / "migrations"

     config = OracleAsyncConfig(
-        pool_config={
+        connection_config={
             "host": oracle_23ai_service.host,
             "port": oracle_23ai_service.port,
             "service_name": oracle_23ai_service.service_name,
@@ -162,7 +162,7 @@ def down():
             )
             assert len(result.data) == 0
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             await config.close_pool()
@@ -177,7 +177,7 @@ def test_oracledb_sync_multiple_migrations_workflow(tmp_path: Path, oracle_23ai_service: OracleService) -> None:
     migration_dir = tmp_path / "migrations"

     config = OracleSyncConfig(
-        pool_config={
+        connection_config={
             "host": oracle_23ai_service.host,
             "port": oracle_23ai_service.port,
             "service_name": oracle_23ai_service.service_name,
@@ -273,7 +273,7 @@ def down():
             )
             assert len(users_result.data) == 0
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             config.close_pool()
@@ -288,7 +288,7 @@ async def test_oracledb_async_multiple_migrations_workflow(tmp_path: Path, oracle_23ai_service: OracleService) -> None:
     migration_dir = tmp_path / "migrations"

     config = OracleAsyncConfig(
-        pool_config={
+        connection_config={
             "host": oracle_23ai_service.host,
             "port": oracle_23ai_service.port,
             "service_name": oracle_23ai_service.service_name,
@@ -388,7 +388,7 @@ def down():
             )
             assert len(users_result.data) == 0
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             await config.close_pool()
@@ -402,7 +402,7 @@ def test_oracledb_sync_migration_current_command(tmp_path: Path, oracle_23ai_service: OracleService) -> None:
     migration_dir = tmp_path / "migrations"

     config = OracleSyncConfig(
-        pool_config={
+        connection_config={
             "host": oracle_23ai_service.host,
             "port": oracle_23ai_service.port,
             "service_name": oracle_23ai_service.service_name,
@@ -448,7 +448,7 @@ def down():
         current_version = commands.current()
         assert current_version is None or current_version == "base"
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             config.close_pool()
@@ -462,7 +462,7 @@ async def test_oracledb_async_migration_current_command(tmp_path: Path, oracle_23ai_service: OracleService) -> None:
     migration_dir = tmp_path / "migrations"

     config = OracleAsyncConfig(
-        pool_config={
+        connection_config={
             "host": oracle_23ai_service.host,
             "port": oracle_23ai_service.port,
             "service_name": oracle_23ai_service.service_name,
@@ -510,7 +510,7 @@ def down():
         current_version = await commands.current()
         assert current_version is None or current_version == "base"
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             await config.close_pool()
@@ -523,7 +523,7 @@ def test_oracledb_sync_migration_error_handling(tmp_path: Path, oracle_23ai_service: OracleService) -> None:
     migration_dir = tmp_path / "migrations"

     config = OracleSyncConfig(
-        pool_config={
+        connection_config={
             "host": oracle_23ai_service.host,
             "port": oracle_23ai_service.port,
             "service_name": oracle_23ai_service.service_name,
@@ -557,7 +557,7 @@ def down():
             count = driver.select_value(f"SELECT COUNT(*) FROM {migration_table}")
             assert count == 0, f"Expected empty migration table after failed migration, but found {count} records"
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             config.close_pool()
@@ -570,7 +570,7 @@ async def test_oracledb_async_migration_error_handling(tmp_path: Path, oracle_23ai_service: OracleService) -> None:
     migration_dir = tmp_path / "migrations"

     config = OracleAsyncConfig(
-        pool_config={
+        connection_config={
             "host": oracle_23ai_service.host,
             "port": oracle_23ai_service.port,
             "service_name": oracle_23ai_service.service_name,
@@ -606,7 +606,7 @@ def down():
             count = await driver.select_value(f"SELECT COUNT(*) FROM {migration_table}")
             assert count == 0, f"Expected empty migration table after failed migration, but found {count} records"
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             await config.close_pool()
@@ -620,7 +620,7 @@ def test_oracledb_sync_migration_with_transactions(tmp_path: Path, oracle_23ai_service: OracleService) -> None:
     migration_dir = tmp_path / "migrations"

     config = OracleSyncConfig(
-        pool_config={
+        connection_config={
             "host": oracle_23ai_service.host,
             "port": oracle_23ai_service.port,
             "service_name": oracle_23ai_service.service_name,
@@ -689,7 +689,7 @@ def down():
             result = driver.execute(f"SELECT * FROM {users_table} WHERE name = 'Rollback User'")
             assert len(result.data) == 0
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             config.close_pool()
@@ -703,7 +703,7 @@ async def test_oracledb_async_migration_with_transactions(tmp_path: Path, oracle_23ai_service: OracleService) -> None:
     migration_dir = tmp_path / "migrations"

     config = OracleAsyncConfig(
-        pool_config={
+        connection_config={
             "host": oracle_23ai_service.host,
             "port": oracle_23ai_service.port,
             "service_name": oracle_23ai_service.service_name,
@@ -774,7 +774,7 @@ def down():
             result = await driver.execute(f"SELECT * FROM {users_table} WHERE name = 'Rollback User'")
             assert len(result.data) == 0
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             await config.close_pool()
@@ -790,7 +790,7 @@ async def test_oracledb_async_schema_migration_from_old_format(
     migration_table = f"sqlspec_migrations_{test_id}"

     config = OracleAsyncConfig(
-        pool_config={
+        connection_config={
             "host": oracle_23ai_service.host,
             "port": oracle_23ai_service.port,
             "service_name": oracle_23ai_service.service_name,
@@ -851,7 +851,7 @@ async def test_oracledb_async_schema_migration_from_old_format(
             assert len(migration_data.data) == 1
             assert migration_data.data[0]["version_num"] == "0001"
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             await config.close_pool()
@@ -865,7 +865,7 @@ def test_oracledb_sync_schema_migration_from_old_format(tmp_path: Path, oracle_23ai_service: OracleService) -> None:
     migration_table = f"sqlspec_migrations_{test_id}"

     config = OracleSyncConfig(
-        pool_config={
+        connection_config={
             "host": oracle_23ai_service.host,
             "port": oracle_23ai_service.port,
             "service_name": oracle_23ai_service.service_name,
@@ -923,5 +923,5 @@ def test_oracledb_sync_schema_migration_from_old_format(tmp_path: Path, oracle_23ai_service: OracleService) -> None:
             assert len(migration_data.data) == 1
             assert migration_data.data[0]["version_num"] == "0001"
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             config.close_pool()
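Every migration test above pairs the renamed `connection_config` with an unchanged `migration_config` and the `connection_instance` teardown check. A condensed sketch of that scaffold (import path and placeholder values assumed; construction of the `commands` object is not shown in this diff, so it is elided):

```python
# Sketch of the migration test scaffold, not a complete workflow.
from pathlib import Path
from sqlspec.adapters.oracledb import OracleSyncConfig  # path assumed

migration_dir = Path("/tmp/migrations")  # tmp_path fixture in the real tests

config = OracleSyncConfig(
    connection_config={"host": "localhost", "port": 1521, "service_name": "FREEPDB1"},
    migration_config={
        "script_location": str(migration_dir),
        "version_table_name": "sqlspec_migrations_demo",
    },
)
try:
    ...  # apply migrations via the commands API, then assert on the schema
finally:
    if config.connection_instance:
        config.close_pool()
```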
diff --git a/tests/integration/test_adapters/test_oracledb/test_numpy_vectors.py b/tests/integration/test_adapters/test_oracledb/test_numpy_vectors.py
index 1bfe8d7e5..566d4320d 100644
--- a/tests/integration/test_adapters/test_oracledb/test_numpy_vectors.py
+++ b/tests/integration/test_adapters/test_oracledb/test_numpy_vectors.py
@@ -14,14 +14,16 @@
 @pytest.fixture
 def oracle_numpy_sync_config(oracle_sync_config: OracleSyncConfig) -> OracleSyncConfig:
     """Create Oracle sync config with NumPy vectors enabled."""
-    return OracleSyncConfig(pool_config=oracle_sync_config.pool_config, driver_features={"enable_numpy_vectors": True})
+    return OracleSyncConfig(
+        connection_config=oracle_sync_config.connection_config, driver_features={"enable_numpy_vectors": True}
+    )


 @pytest.fixture
 def oracle_numpy_async_config(oracle_async_config: OracleAsyncConfig) -> OracleAsyncConfig:
     """Create Oracle async config with NumPy vectors enabled."""
     return OracleAsyncConfig(
-        pool_config=oracle_async_config.pool_config, driver_features={"enable_numpy_vectors": True}
+        connection_config=oracle_async_config.connection_config, driver_features={"enable_numpy_vectors": True}
     )
@@ -273,7 +275,7 @@ async def test_numpy_disabled_by_default(oracle_async_config: OracleAsyncConfig) -> None:
     import numpy as np

     config_no_numpy = OracleAsyncConfig(
-        pool_config=oracle_async_config.pool_config, driver_features={"enable_numpy_vectors": False}
+        connection_config=oracle_async_config.connection_config, driver_features={"enable_numpy_vectors": False}
     )

     async with config_no_numpy.provide_session() as session:
diff --git a/tests/integration/test_adapters/test_oracledb/test_uuid_binary.py b/tests/integration/test_adapters/test_oracledb/test_uuid_binary.py
index bb8bbcfdb..61169ba10 100644
--- a/tests/integration/test_adapters/test_oracledb/test_uuid_binary.py
+++ b/tests/integration/test_adapters/test_oracledb/test_uuid_binary.py
@@ -13,19 +13,25 @@
 @pytest.fixture
 def oracle_uuid_sync_config(oracle_sync_config: OracleSyncConfig) -> OracleSyncConfig:
     """Create Oracle sync config with UUID binary enabled."""
-    return OracleSyncConfig(pool_config=oracle_sync_config.pool_config, driver_features={"enable_uuid_binary": True})
+    return OracleSyncConfig(
+        connection_config=oracle_sync_config.connection_config, driver_features={"enable_uuid_binary": True}
+    )


 @pytest.fixture
 def oracle_uuid_async_config(oracle_async_config: OracleAsyncConfig) -> OracleAsyncConfig:
     """Create Oracle async config with UUID binary enabled."""
-    return OracleAsyncConfig(pool_config=oracle_async_config.pool_config, driver_features={"enable_uuid_binary": True})
+    return OracleAsyncConfig(
+        connection_config=oracle_async_config.connection_config, driver_features={"enable_uuid_binary": True}
+    )


 @pytest.fixture
 def oracle_uuid_disabled_async_config(oracle_async_config: OracleAsyncConfig) -> OracleAsyncConfig:
     """Create Oracle async config with UUID binary explicitly disabled."""
-    return OracleAsyncConfig(pool_config=oracle_async_config.pool_config, driver_features={"enable_uuid_binary": False})
+    return OracleAsyncConfig(
+        connection_config=oracle_async_config.connection_config, driver_features={"enable_uuid_binary": False}
+    )


 async def test_create_uuid_table(oracle_async_session: OracleAsyncDriver) -> None:
@@ -217,7 +223,7 @@ async def test_uuid_numpy_coexistence(oracle_async_config: OracleAsyncConfig) -> None:
     import numpy as np

     config = OracleAsyncConfig(
-        pool_config=oracle_async_config.pool_config,
+        connection_config=oracle_async_config.connection_config,
         driver_features={"enable_numpy_vectors": True, "enable_uuid_binary": True},
     )
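As `test_uuid_numpy_coexistence` shows, these optional type handlers are independent flags in `driver_features` and can be combined on one config. A minimal sketch (import path and placeholder values assumed):

```python
# Sketch: opting into both optional Oracle type handlers at once,
# mirroring the coexistence test above.
from sqlspec.adapters.oracledb import OracleAsyncConfig  # path assumed

base = OracleAsyncConfig(
    connection_config={"host": "localhost", "port": 1521, "service_name": "FREEPDB1"}
)

config = OracleAsyncConfig(
    connection_config=base.connection_config,
    driver_features={"enable_numpy_vectors": True, "enable_uuid_binary": True},
)
```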
diff --git a/tests/integration/test_adapters/test_psqlpy/conftest.py b/tests/integration/test_adapters/test_psqlpy/conftest.py
index 7cf9e6388..d091c30b0 100644
--- a/tests/integration/test_adapters/test_psqlpy/conftest.py
+++ b/tests/integration/test_adapters/test_psqlpy/conftest.py
@@ -21,7 +21,7 @@ async def psqlpy_config(postgres_service: PostgresService) -> AsyncGenerator[PsqlpyConfig, None]:
         f"postgres://{postgres_service.user}:{postgres_service.password}@"
         f"{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
     )
-    config = PsqlpyConfig(pool_config={"dsn": dsn, "max_db_pool_size": 5})
+    config = PsqlpyConfig(connection_config={"dsn": dsn, "max_db_pool_size": 5})
     try:
         yield config
     finally:
diff --git a/tests/integration/test_adapters/test_psqlpy/test_migrations.py b/tests/integration/test_adapters/test_psqlpy/test_migrations.py
index 5d0275dbc..279098f60 100644
--- a/tests/integration/test_adapters/test_psqlpy/test_migrations.py
+++ b/tests/integration/test_adapters/test_psqlpy/test_migrations.py
@@ -21,7 +21,7 @@ async def test_psqlpy_migration_full_workflow(tmp_path: Path, postgres_service: PostgresService) -> None:
     migration_dir = tmp_path / "migrations"

     config = PsqlpyConfig(
-        pool_config={
+        connection_config={
             "dsn": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
         },
         migration_config={"script_location": str(migration_dir), "version_table_name": migration_table},
@@ -82,7 +82,7 @@ def down():
             )
             assert len(result.data) == 0
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             await config.close_pool()
@@ -97,7 +97,7 @@ async def test_psqlpy_multiple_migrations_workflow(tmp_path: Path, postgres_service: PostgresService) -> None:
     migration_dir = tmp_path / "migrations"

     config = PsqlpyConfig(
-        pool_config={
+        connection_config={
             "dsn": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
         },
         migration_config={"script_location": str(migration_dir), "version_table_name": migration_table},
@@ -190,7 +190,7 @@ def down():
             )
             assert len(users_result.data) == 0
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             await config.close_pool()
@@ -204,7 +204,7 @@ async def test_psqlpy_migration_current_command(tmp_path: Path, postgres_service: PostgresService) -> None:
     migration_dir = tmp_path / "migrations"

     config = PsqlpyConfig(
-        pool_config={
+        connection_config={
             "dsn": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
         },
         migration_config={"script_location": str(migration_dir), "version_table_name": migration_table},
@@ -246,7 +246,7 @@ def down():
         current_version = await commands.current()
         assert current_version is None or current_version == "base"
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             await config.close_pool()
@@ -255,7 +255,7 @@ async def test_psqlpy_migration_error_handling(tmp_path: Path, postgres_service: PostgresService) -> None:
     migration_dir = tmp_path / "migrations"

     config = PsqlpyConfig(
-        pool_config={
+        connection_config={
             "dsn": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
         },
         migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations_psqlpy"},
@@ -289,7 +289,7 @@ def down():
     except Exception as e:
         assert "no such" in str(e).lower() or "does not exist" in str(e).lower()
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             await config.close_pool()
@@ -303,7 +303,7 @@ async def test_psqlpy_migration_with_transactions(tmp_path: Path, postgres_service: PostgresService) -> None:
     migration_dir = tmp_path / "migrations"

     config = PsqlpyConfig(
-        pool_config={
+        connection_config={
             "dsn": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
         },
         migration_config={"script_location": str(migration_dir), "version_table_name": migration_table},
@@ -368,5 +368,5 @@ def down():
         result = await driver.execute(f"SELECT * FROM {users_table} WHERE name = 'Rollback User'")
         assert len(result.data) == 0
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             await config.close_pool()
diff --git a/tests/integration/test_adapters/test_psycopg/conftest.py b/tests/integration/test_adapters/test_psycopg/conftest.py
index 44e7a7c2b..8abf2e40e 100644
--- a/tests/integration/test_adapters/test_psycopg/conftest.py
+++ b/tests/integration/test_adapters/test_psycopg/conftest.py
@@ -15,13 +15,13 @@
 def psycopg_sync_config(postgres_service: PostgresService) -> "Generator[PsycopgSyncConfig, None, None]":
     """Create a psycopg sync configuration."""
     config = PsycopgSyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
         }
     )
     yield config

-    if config.pool_instance:
+    if config.connection_instance:
         config.close_pool()
@@ -29,12 +29,12 @@ def psycopg_sync_config(postgres_service: PostgresService) -> "Generator[PsycopgSyncConfig, None, None]":
 async def psycopg_async_config(postgres_service: PostgresService) -> "AsyncGenerator[PsycopgAsyncConfig, None]":
     """Create a psycopg async configuration."""
     config = PsycopgAsyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
         }
     )
     try:
         yield config
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             await config.close_pool()
diff --git a/tests/integration/test_adapters/test_psycopg/test_arrow.py b/tests/integration/test_adapters/test_psycopg/test_arrow.py
index d85005da7..e6d368391 100644
--- a/tests/integration/test_adapters/test_psycopg/test_arrow.py
+++ b/tests/integration/test_adapters/test_psycopg/test_arrow.py
@@ -14,8 +14,8 @@
 @pytest.fixture
 async def psycopg_config(psycopg_async_config: PsycopgAsyncConfig) -> PsycopgAsyncConfig:
     """Create Psycopg async configuration for testing."""
-    psycopg_async_config.pool_config.setdefault("min_size", 1)
-    psycopg_async_config.pool_config.setdefault("max_size", 2)
+    psycopg_async_config.connection_config.setdefault("min_size", 1)
+    psycopg_async_config.connection_config.setdefault("max_size", 2)
     return psycopg_async_config
diff --git a/tests/integration/test_adapters/test_psycopg/test_async_copy.py b/tests/integration/test_adapters/test_psycopg/test_async_copy.py
index c36cd2354..8eadbaf86 100644
--- a/tests/integration/test_adapters/test_psycopg/test_async_copy.py
+++ b/tests/integration/test_adapters/test_psycopg/test_async_copy.py
@@ -17,14 +17,14 @@ async def psycopg_async_session(postgres_service: PostgresService) -> AsyncGenerator[PsycopgAsyncDriver, None]:
     """Create a psycopg async session with test table."""
     config = PsycopgAsyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}",
             "autocommit": True,
         }
     )

     pool = await config.create_pool()
-    config.pool_instance = pool
+    config.connection_instance = pool

     try:
         async with config.provide_session() as session:
diff --git a/tests/integration/test_adapters/test_psycopg/test_connection.py b/tests/integration/test_adapters/test_psycopg/test_connection.py
index 09cc0f91c..d4e3aed2c 100644
--- a/tests/integration/test_adapters/test_psycopg/test_connection.py
+++ b/tests/integration/test_adapters/test_psycopg/test_connection.py
@@ -29,7 +29,7 @@ def test_sync_connection(postgres_service: PostgresService) -> None:
     """Test sync connection components."""
     # Test direct connection
     sync_config = PsycopgSyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": f"host={postgres_service.host} port={postgres_service.port} user={postgres_service.user} password={postgres_service.password} dbname={postgres_service.database}"
         }
     )
@@ -48,7 +48,7 @@ def test_sync_connection(postgres_service: PostgresService) -> None:
     # Test connection pool
     another_config = PsycopgSyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": f"postgres://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}",
             "min_size": 1,
             "max_size": 5,
diff --git a/tests/integration/test_adapters/test_psycopg/test_exceptions.py b/tests/integration/test_adapters/test_psycopg/test_exceptions.py
index 6cc2ebf57..8ad8bf3e9 100644
--- a/tests/integration/test_adapters/test_psycopg/test_exceptions.py
+++ b/tests/integration/test_adapters/test_psycopg/test_exceptions.py
@@ -21,7 +21,7 @@ def psycopg_sync_exception_session(postgres_service: PostgresService) -> Generator[PsycopgSyncDriver, None, None]:
     """Create a psycopg sync session for exception testing."""
     config = PsycopgSyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}",
             "kwargs": {"autocommit": True},
         }
@@ -31,7 +31,7 @@ def psycopg_sync_exception_session(postgres_service: PostgresService) -> Generator[PsycopgSyncDriver, None, None]:
         with config.provide_session() as session:
             yield session
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             config.close_pool()
@@ -41,7 +41,7 @@ async def psycopg_async_exception_session(
 ) -> AsyncGenerator[PsycopgAsyncDriver, None]:
     """Create a psycopg async session for exception testing."""
     config = PsycopgAsyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}",
             "kwargs": {"autocommit": True},
         }
@@ -51,7 +51,7 @@ async def psycopg_async_exception_session(
         async with config.provide_session() as session:
             yield session
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             await config.close_pool()
diff --git a/tests/integration/test_adapters/test_psycopg/test_execute_many.py b/tests/integration/test_adapters/test_psycopg/test_execute_many.py
index 29937da4c..0de2c3096 100644
--- a/tests/integration/test_adapters/test_psycopg/test_execute_many.py
+++ b/tests/integration/test_adapters/test_psycopg/test_execute_many.py
@@ -16,7 +16,7 @@ def psycopg_batch_session(postgres_service: PostgresService) -> "Generator[PsycopgSyncDriver, None, None]":
     """Create a Psycopg session for batch operation testing."""
     config = PsycopgSyncConfig(
-        pool_config={
+        connection_config={
             "host": postgres_service.host,
             "port": postgres_service.port,
             "user": postgres_service.user,
diff --git a/tests/integration/test_adapters/test_psycopg/test_extensions/test_adk/test_owner_id_column.py b/tests/integration/test_adapters/test_psycopg/test_extensions/test_adk/test_owner_id_column.py
index fe7c3e664..789eff133 100644
--- a/tests/integration/test_adapters/test_psycopg/test_extensions/test_adk/test_owner_id_column.py
+++ b/tests/integration/test_adapters/test_psycopg/test_extensions/test_adk/test_owner_id_column.py
@@ -18,7 +18,7 @@ async def psycopg_async_store_with_fk(postgres_service: "PostgresService") -> "AsyncGenerator[Any, None]":
     """Create Psycopg async ADK store with owner_id_column configured."""
     config = PsycopgAsyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
         },
         extension_config={
@@ -37,7 +37,7 @@ async def psycopg_async_store_with_fk(postgres_service: "PostgresService") -> "AsyncGenerator[Any, None]":
             await cur.execute("DROP TABLE IF EXISTS test_events_fk CASCADE")
             await cur.execute("DROP TABLE IF EXISTS test_sessions_fk CASCADE")

-    if config.pool_instance:
+    if config.connection_instance:
         await config.close_pool()
@@ -45,7 +45,7 @@ async def psycopg_async_store_with_fk(postgres_service: "PostgresService") -> "AsyncGenerator[Any, None]":
 def psycopg_sync_store_with_fk(postgres_service: "PostgresService") -> "Generator[Any, None, None]":
     """Create Psycopg sync ADK store with owner_id_column configured."""
     config = PsycopgSyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
         },
         extension_config={
@@ -64,7 +64,7 @@ def psycopg_sync_store_with_fk(postgres_service: "PostgresService") -> "Generator[Any, None, None]":
             cur.execute("DROP TABLE IF EXISTS test_events_sync_fk CASCADE")
             cur.execute("DROP TABLE IF EXISTS test_sessions_sync_fk CASCADE")

-    if config.pool_instance:
+    if config.connection_instance:
         config.close_pool()
@@ -83,7 +83,7 @@ def test_sync_store_owner_id_column_initialization(psycopg_sync_store_with_fk: Any) -> None:
 async def test_async_store_inherits_owner_id_column(postgres_service: "PostgresService") -> None:
     """Test that async store correctly inherits owner_id_column from base class."""
     config = PsycopgAsyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
         },
         extension_config={
@@ -101,14 +101,14 @@ async def test_async_store_inherits_owner_id_column(postgres_service: "PostgresService") -> None:
     assert store.owner_id_column_ddl == "org_id UUID"
     assert store.owner_id_column_name == "org_id"

-    if config.pool_instance:
+    if config.connection_instance:
         await config.close_pool()


 def test_sync_store_inherits_owner_id_column(postgres_service: "PostgresService") -> None:
     """Test that sync store correctly inherits owner_id_column from base class."""
     config = PsycopgSyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
         },
         extension_config={
@@ -126,14 +126,14 @@ def test_sync_store_inherits_owner_id_column(postgres_service: "PostgresService") -> None:
     assert store.owner_id_column_ddl == "company_id BIGINT"
     assert store.owner_id_column_name == "company_id"

-    if config.pool_instance:
+    if config.connection_instance:
         config.close_pool()


 async def test_async_store_without_owner_id_column(postgres_service: "PostgresService") -> None:
     """Test that async store works without owner_id_column (default behavior)."""
     config = PsycopgAsyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
         },
         extension_config={"adk": {"session_table": "test_no_fk_async", "events_table": "test_events_no_fk_async"}},
@@ -143,14 +143,14 @@ async def test_async_store_without_owner_id_column(postgres_service: "PostgresService") -> None:
     assert store.owner_id_column_ddl is None
     assert store.owner_id_column_name is None

-    if config.pool_instance:
+    if config.connection_instance:
         await config.close_pool()


 def test_sync_store_without_owner_id_column(postgres_service: "PostgresService") -> None:
     """Test that sync store works without owner_id_column (default behavior)."""
     config = PsycopgSyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
         },
         extension_config={"adk": {"session_table": "test_no_fk_sync", "events_table": "test_events_no_fk_sync"}},
@@ -160,7 +160,7 @@ def test_sync_store_without_owner_id_column(postgres_service: "PostgresService") -> None:
     assert store.owner_id_column_ddl is None
     assert store.owner_id_column_name is None

-    if config.pool_instance:
+    if config.connection_instance:
         config.close_pool()
diff --git a/tests/integration/test_adapters/test_psycopg/test_extensions/test_litestar/test_store_sync.py b/tests/integration/test_adapters/test_psycopg/test_extensions/test_litestar/test_store_sync.py
index 237bf7108..abb8e4cb1 100644
--- a/tests/integration/test_adapters/test_psycopg/test_extensions/test_litestar/test_store_sync.py
+++ b/tests/integration/test_adapters/test_psycopg/test_extensions/test_litestar/test_store_sync.py
@@ -17,7 +17,7 @@ async def psycopg_sync_store(postgres_service: PostgresService) -> AsyncGenerator[PsycopgSyncStore, None]:
     """Create Psycopg sync store with test database."""
     config = PsycopgSyncConfig(
-        pool_config={
+        connection_config={
             "host": postgres_service.host,
             "port": postgres_service.port,
             "user": postgres_service.user,
@@ -35,7 +35,7 @@ async def psycopg_sync_store(postgres_service: PostgresService) -> AsyncGenerator[PsycopgSyncStore, None]:
         except Exception:
             pass
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             config.close_pool()
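The owner_id_column tests above pin down a small contract: the configured DDL fragment is stored verbatim, and its leading identifier is parsed out as the column name. A minimal sketch of that configuration (import path assumed, credentials are placeholders):

```python
# Sketch of the owner_id_column wiring asserted above.
from sqlspec.adapters.psycopg import PsycopgSyncConfig  # path assumed

config = PsycopgSyncConfig(
    connection_config={"conninfo": "postgresql://user:pass@localhost:5432/db"},
    extension_config={
        "adk": {
            "session_table": "demo_sessions",
            "events_table": "demo_events",
            "owner_id_column": "company_id BIGINT",
        }
    },
)
# Per the tests above, a store built from this config reports
# owner_id_column_ddl == "company_id BIGINT" and owner_id_column_name == "company_id".
```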
diff --git a/tests/integration/test_adapters/test_psycopg/test_migrations.py b/tests/integration/test_adapters/test_psycopg/test_migrations.py
index 00d6b01d5..81513b3b9 100644
--- a/tests/integration/test_adapters/test_psycopg/test_migrations.py
+++ b/tests/integration/test_adapters/test_psycopg/test_migrations.py
@@ -23,7 +23,7 @@ def test_psycopg_sync_migration_full_workflow(tmp_path: Path, postgres_service: PostgresService) -> None:
     migration_dir = tmp_path / "migrations"

     config = PsycopgSyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
         },
         migration_config={"script_location": str(migration_dir), "version_table_name": migration_table},
@@ -82,7 +82,7 @@ def down():
             )
             assert len(result.data) == 0
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             config.close_pool()
@@ -96,7 +96,7 @@ async def test_psycopg_async_migration_full_workflow(tmp_path: Path, postgres_service: PostgresService) -> None:
     migration_dir = tmp_path / "migrations"

     config = PsycopgAsyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
         },
         migration_config={"script_location": str(migration_dir), "version_table_name": migration_table},
@@ -157,7 +157,7 @@ def down():
             )
             assert len(result.data) == 0
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             await config.close_pool()
@@ -172,7 +172,7 @@ def test_psycopg_sync_multiple_migrations_workflow(tmp_path: Path, postgres_service: PostgresService) -> None:
     migration_dir = tmp_path / "migrations"

     config = PsycopgSyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
         },
         migration_config={"script_location": str(migration_dir), "version_table_name": migration_table},
@@ -257,7 +257,7 @@ def down():
             )
             assert len(users_result.data) == 0
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             config.close_pool()
@@ -277,7 +277,7 @@ async def test_psycopg_async_multiple_migrations_workflow(tmp_path: Path, postgres_service: PostgresService) -> None:
         pytest.skip("PsycopgAsyncConfig not available")

     config = PsycopgAsyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
         },
         migration_config={"script_location": str(migration_dir), "version_table_name": migration_table},
@@ -362,7 +362,7 @@ def down():
             )
             assert len(users_result.data) == 0
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             await config.close_pool()
@@ -376,7 +376,7 @@ def test_psycopg_sync_migration_current_command(tmp_path: Path, postgres_service: PostgresService) -> None:
     migration_dir = tmp_path / "migrations"

     config = PsycopgSyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
         },
         migration_config={"script_location": str(migration_dir), "version_table_name": migration_table},
@@ -418,7 +418,7 @@ def down():
         current_version = commands.current()
         assert current_version is None or current_version == "base"
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             config.close_pool()
@@ -437,7 +437,7 @@ async def test_psycopg_async_migration_current_command(tmp_path: Path, postgres_service: PostgresService) -> None:
         pytest.skip("PsycopgAsyncConfig not available")

     config = PsycopgAsyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
         },
         migration_config={"script_location": str(migration_dir), "version_table_name": migration_table},
@@ -479,7 +479,7 @@ def down():
         current_version = await commands.current()
         assert current_version is None or current_version == "base"
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             await config.close_pool()
@@ -488,7 +488,7 @@ def test_psycopg_sync_migration_error_handling(tmp_path: Path, postgres_service: PostgresService) -> None:
     migration_dir = tmp_path / "migrations"

     config = PsycopgSyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
         },
         migration_config={
@@ -525,7 +525,7 @@ def down():
     except Exception as e:
         assert "no such" in str(e).lower() or "does not exist" in str(e).lower()
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             config.close_pool()
@@ -539,7 +539,7 @@ async def test_psycopg_async_migration_error_handling(tmp_path: Path, postgres_service: PostgresService) -> None:
         pytest.skip("PsycopgAsyncConfig not available")

     config = PsycopgAsyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
         },
         migration_config={
@@ -576,7 +576,7 @@ def down():
     except Exception as e:
         assert "no such" in str(e).lower() or "does not exist" in str(e).lower()
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             await config.close_pool()
@@ -590,7 +590,7 @@ def test_psycopg_sync_migration_with_transactions(tmp_path: Path, postgres_service: PostgresService) -> None:
     migration_dir = tmp_path / "migrations"

     config = PsycopgSyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
         },
         migration_config={"script_location": str(migration_dir), "version_table_name": migration_table},
@@ -655,7 +655,7 @@ def down():
             result = driver.execute(f"SELECT * FROM {users_table} WHERE name = 'Rollback User'")
             assert len(result.data) == 0
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             config.close_pool()
@@ -674,7 +674,7 @@ async def test_psycopg_async_migration_with_transactions(tmp_path: Path, postgres_service: PostgresService) -> None:
         pytest.skip("PsycopgAsyncConfig not available")

     config = PsycopgAsyncConfig(
-        pool_config={
+        connection_config={
             "conninfo": f"postgresql://{postgres_service.user}:{postgres_service.password}@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
         },
         migration_config={"script_location": str(migration_dir), "version_table_name": migration_table},
@@ -739,5 +739,5 @@ def down():
         result = await driver.execute(f"SELECT * FROM {users_table} WHERE name = 'Rollback User'")
         assert len(result.data) == 0
     finally:
-        if config.pool_instance:
+        if config.connection_instance:
             await config.close_pool()
diff --git a/tests/integration/test_adapters/test_psycopg/test_parameter_styles.py b/tests/integration/test_adapters/test_psycopg/test_parameter_styles.py
index b240ca628..478e87f8e 100644
--- a/tests/integration/test_adapters/test_psycopg/test_parameter_styles.py
+++ b/tests/integration/test_adapters/test_psycopg/test_parameter_styles.py
@@ -17,7 +17,7 @@ def psycopg_parameters_session(postgres_service: PostgresService) -> "Generator[PsycopgSyncDriver, None, None]":
     """Create a Psycopg session for parameter style testing."""
     config = PsycopgSyncConfig(
-        pool_config={
+        connection_config={
             "host": postgres_service.host,
             "port": postgres_service.port,
             "user": postgres_service.user,
a/tests/integration/test_adapters/test_spanner/conftest.py b/tests/integration/test_adapters/test_spanner/conftest.py index b9d97322e..d062b317c 100644 --- a/tests/integration/test_adapters/test_spanner/conftest.py +++ b/tests/integration/test_adapters/test_spanner/conftest.py @@ -43,7 +43,7 @@ def spanner_config( api_endpoint = f"{spanner_service.host}:{spanner_service.port}" return SpannerSyncConfig( - pool_config={ + connection_config={ "project": spanner_service.project, "instance_id": spanner_service.instance_name, "database_id": spanner_service.database_name, diff --git a/tests/integration/test_adapters/test_spanner/test_extensions/test_adk/conftest.py b/tests/integration/test_adapters/test_spanner/test_extensions/test_adk/conftest.py index 884ffbc7f..4ae8782d2 100644 --- a/tests/integration/test_adapters/test_spanner/test_extensions/test_adk/conftest.py +++ b/tests/integration/test_adapters/test_spanner/test_extensions/test_adk/conftest.py @@ -16,7 +16,7 @@ def spanner_adk_config(spanner_service: SpannerService, spanner_database: "Datab api_endpoint = f"{spanner_service.host}:{spanner_service.port}" return SpannerSyncConfig( - pool_config={ + connection_config={ "project": spanner_service.project, "instance_id": spanner_service.instance_name, "database_id": spanner_service.database_name, diff --git a/tests/integration/test_adapters/test_spanner/test_extensions/test_litestar/conftest.py b/tests/integration/test_adapters/test_spanner/test_extensions/test_litestar/conftest.py index 031878fb5..c56dd7765 100644 --- a/tests/integration/test_adapters/test_spanner/test_extensions/test_litestar/conftest.py +++ b/tests/integration/test_adapters/test_spanner/test_extensions/test_litestar/conftest.py @@ -16,7 +16,7 @@ def spanner_litestar_config(spanner_service: SpannerService, spanner_database: " api_endpoint = f"{spanner_service.host}:{spanner_service.port}" return SpannerSyncConfig( - pool_config={ + connection_config={ "project": spanner_service.project, "instance_id": spanner_service.instance_name, "database_id": spanner_service.database_name, diff --git a/tests/integration/test_adapters/test_sqlite/conftest.py b/tests/integration/test_adapters/test_sqlite/conftest.py index 681fcb7c5..aa711adb2 100644 --- a/tests/integration/test_adapters/test_sqlite/conftest.py +++ b/tests/integration/test_adapters/test_sqlite/conftest.py @@ -15,7 +15,7 @@ def sqlite_session() -> Generator[SqliteDriver, None, None]: This fixture creates an in-memory SQLite database with a test table and ensures proper cleanup after test completion. 
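A minimal usage sketch with the renamed keyword (row access via ``result.data`` is assumed to match the dict-style rows used elsewhere in these tests)::

    config = SqliteConfig(connection_config={"database": ":memory:"})
    with config.provide_session() as session:
        result = session.execute("SELECT 1 AS one")
        assert result.data[0]["one"] == 1
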
""" - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) try: with config.provide_session() as session: @@ -49,7 +49,7 @@ def sqlite_session() -> Generator[SqliteDriver, None, None]: def sqlite_basic_session() -> Generator[SqliteDriver, None, None]: """Yield a bare SQLite session for tests needing a clean database.""" - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) try: with config.provide_session() as session: session.execute("PRAGMA foreign_keys = ON") @@ -97,7 +97,7 @@ def sqlite_driver() -> Generator[SqliteDriver, None, None]: def sqlite_config_shared_memory() -> SqliteConfig: """Create SQLite config with shared memory for pooling tests.""" return SqliteConfig( - pool_config=cast( + connection_config=cast( "Any", {"database": "file::memory:?cache=shared", "uri": True, "pool_min_size": 2, "pool_max_size": 5} ) ) @@ -106,7 +106,9 @@ def sqlite_config_shared_memory() -> SqliteConfig: @pytest.fixture def sqlite_config_regular_memory() -> SqliteConfig: """Create SQLite config with regular memory for auto-conversion tests.""" - return SqliteConfig(pool_config=cast("Any", {"database": ":memory:", "pool_min_size": 5, "pool_max_size": 10})) + return SqliteConfig( + connection_config=cast("Any", {"database": ":memory:", "pool_min_size": 5, "pool_max_size": 10}) + ) @pytest.fixture @@ -119,7 +121,9 @@ def sqlite_temp_file_config() -> Generator[SqliteConfig, None, None]: db_path = tmp.name try: - config = SqliteConfig(pool_config=cast("Any", {"database": db_path, "pool_min_size": 3, "pool_max_size": 8})) + config = SqliteConfig( + connection_config=cast("Any", {"database": db_path, "pool_min_size": 3, "pool_max_size": 8}) + ) yield config finally: try: diff --git a/tests/integration/test_adapters/test_sqlite/test_driver_features.py b/tests/integration/test_adapters/test_sqlite/test_driver_features.py index 3bb4abc74..9bd72a15f 100644 --- a/tests/integration/test_adapters/test_sqlite/test_driver_features.py +++ b/tests/integration/test_adapters/test_sqlite/test_driver_features.py @@ -15,14 +15,14 @@ @pytest.mark.sqlite def test_driver_features_enabled_by_default() -> None: """Test that driver features are enabled by default for stdlib types.""" - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) assert config.driver_features.get("enable_custom_adapters") is True @pytest.mark.sqlite def test_enable_custom_adapters_feature() -> None: """Test enabling custom type adapters feature.""" - config = SqliteConfig(pool_config={"database": ":memory:"}, driver_features={"enable_custom_adapters": True}) + config = SqliteConfig(connection_config={"database": ":memory:"}, driver_features={"enable_custom_adapters": True}) assert config.driver_features["enable_custom_adapters"] is True @@ -31,7 +31,7 @@ def test_enable_custom_adapters_feature() -> None: def test_json_serialization_with_custom_adapters() -> None: """Test JSON dict/list serialization with custom adapters enabled.""" config = SqliteConfig( - pool_config={"database": ":memory:", "detect_types": sqlite3.PARSE_DECLTYPES}, + connection_config={"database": ":memory:", "detect_types": sqlite3.PARSE_DECLTYPES}, driver_features={"enable_custom_adapters": True}, ) @@ -67,7 +67,7 @@ def custom_deserializer(text: str) -> Any: return json.loads(text) config = SqliteConfig( - pool_config={"database": ":memory:", "detect_types": 
sqlite3.PARSE_DECLTYPES}, + connection_config={"database": ":memory:", "detect_types": sqlite3.PARSE_DECLTYPES}, driver_features={ "enable_custom_adapters": True, "json_serializer": custom_serializer, @@ -93,7 +93,7 @@ def custom_deserializer(text: str) -> Any: @pytest.mark.sqlite def test_backward_compatibility_without_custom_adapters() -> None: """Test backward compatibility when custom adapters are not enabled.""" - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) sql = SQLSpec() sql.add_config(config) diff --git a/tests/integration/test_adapters/test_sqlite/test_extensions/test_adk/test_owner_id_column.py b/tests/integration/test_adapters/test_sqlite/test_extensions/test_adk/test_owner_id_column.py index 0c39ab038..559f4e231 100644 --- a/tests/integration/test_adapters/test_sqlite/test_extensions/test_adk/test_owner_id_column.py +++ b/tests/integration/test_adapters/test_sqlite/test_extensions/test_adk/test_owner_id_column.py @@ -74,7 +74,7 @@ def sqlite_config() -> SqliteConfig: - Prevent table schema conflicts between different tests - Enable foreign key relationships across connections """ - return SqliteConfig(pool_config={"database": _make_shared_memory_db_name(), "uri": True}) + return SqliteConfig(connection_config={"database": _make_shared_memory_db_name(), "uri": True}) @pytest.fixture @@ -109,7 +109,7 @@ async def test_owner_id_column_integer_reference( tenant_id = _insert_tenant(sqlite_config, "tenant_alpha") config_with_extension = SqliteConfig( - pool_config=sqlite_config.pool_config, + connection_config=sqlite_config.connection_config, extension_config={ "adk": {"owner_id_column": "tenant_id INTEGER NOT NULL REFERENCES tenants(id) ON DELETE CASCADE"} }, @@ -141,7 +141,7 @@ async def test_owner_id_column_text_reference( _insert_user(sqlite_config, username, "alice@example.com") config_with_extension = SqliteConfig( - pool_config=sqlite_config.pool_config, + connection_config=sqlite_config.connection_config, extension_config={"adk": {"owner_id_column": "user_ref TEXT REFERENCES users(username) ON DELETE CASCADE"}}, ) store = SqliteADKStore(config_with_extension) @@ -165,7 +165,7 @@ async def test_owner_id_column_cascade_delete( tenant_id = _insert_tenant(sqlite_config, "tenant_beta") config_with_extension = SqliteConfig( - pool_config=sqlite_config.pool_config, + connection_config=sqlite_config.connection_config, extension_config={ "adk": {"owner_id_column": "tenant_id INTEGER NOT NULL REFERENCES tenants(id) ON DELETE CASCADE"} }, @@ -194,7 +194,7 @@ async def test_owner_id_column_constraint_violation( _create_tenants_table(sqlite_config) config_with_extension = SqliteConfig( - pool_config=sqlite_config.pool_config, + connection_config=sqlite_config.connection_config, extension_config={"adk": {"owner_id_column": "tenant_id INTEGER NOT NULL REFERENCES tenants(id)"}}, ) store = SqliteADKStore(config_with_extension) @@ -215,7 +215,7 @@ async def test_owner_id_column_not_null_constraint( _create_tenants_table(sqlite_config) config_with_extension = SqliteConfig( - pool_config=sqlite_config.pool_config, + connection_config=sqlite_config.connection_config, extension_config={"adk": {"owner_id_column": "tenant_id INTEGER NOT NULL REFERENCES tenants(id)"}}, ) store = SqliteADKStore(config_with_extension) @@ -235,7 +235,7 @@ async def test_owner_id_column_nullable( tenant_id = _insert_tenant(sqlite_config, "tenant_gamma") config_with_extension = SqliteConfig( - pool_config=sqlite_config.pool_config, + 
connection_config=sqlite_config.connection_config, extension_config={"adk": {"owner_id_column": "tenant_id INTEGER REFERENCES tenants(id)"}}, ) store = SqliteADKStore(config_with_extension) @@ -273,7 +273,7 @@ async def test_foreign_keys_pragma_enabled( tenant_id = _insert_tenant(sqlite_config, "tenant_delta") config_with_extension = SqliteConfig( - pool_config=sqlite_config.pool_config, + connection_config=sqlite_config.connection_config, extension_config={"adk": {"owner_id_column": "tenant_id INTEGER NOT NULL REFERENCES tenants(id)"}}, ) store = SqliteADKStore(config_with_extension) @@ -296,7 +296,7 @@ async def test_multi_tenant_isolation( tenant2_id = _insert_tenant(sqlite_config, "tenant_two") config_with_extension = SqliteConfig( - pool_config=sqlite_config.pool_config, + connection_config=sqlite_config.connection_config, extension_config={ "adk": {"owner_id_column": "tenant_id INTEGER NOT NULL REFERENCES tenants(id) ON DELETE CASCADE"} }, @@ -333,7 +333,7 @@ async def test_multi_tenant_isolation( async def test_owner_id_column_ddl_extraction(sqlite_config: SqliteConfig) -> None: """Test that column name is correctly extracted from DDL.""" config_with_extension = SqliteConfig( - pool_config=sqlite_config.pool_config, + connection_config=sqlite_config.connection_config, extension_config={ "adk": {"owner_id_column": "tenant_id INTEGER NOT NULL REFERENCES tenants(id) ON DELETE CASCADE"} }, @@ -351,7 +351,7 @@ async def test_create_session_without_fk_when_not_required( _create_tenants_table(sqlite_config) config_with_extension = SqliteConfig( - pool_config=sqlite_config.pool_config, + connection_config=sqlite_config.connection_config, extension_config={"adk": {"owner_id_column": "tenant_id INTEGER REFERENCES tenants(id)"}}, ) store = SqliteADKStore(config_with_extension) @@ -371,7 +371,7 @@ async def test_owner_id_with_default_value( default_tenant_id = _insert_tenant(sqlite_config, "default_tenant") config_with_extension = SqliteConfig( - pool_config=sqlite_config.pool_config, + connection_config=sqlite_config.connection_config, extension_config={ "adk": {"owner_id_column": f"tenant_id INTEGER DEFAULT {default_tenant_id} REFERENCES tenants(id)"} }, diff --git a/tests/integration/test_adapters/test_sqlite/test_extensions/test_litestar/test_store.py b/tests/integration/test_adapters/test_sqlite/test_extensions/test_litestar/test_store.py index b80f14664..fc9fead44 100644 --- a/tests/integration/test_adapters/test_sqlite/test_extensions/test_litestar/test_store.py +++ b/tests/integration/test_adapters/test_sqlite/test_extensions/test_litestar/test_store.py @@ -16,7 +16,7 @@ async def sqlite_store() -> AsyncGenerator[SQLiteStore, None]: """Create SQLite store with shared in-memory database.""" config = SqliteConfig( - pool_config={"database": "file:test_sessions_mem?mode=memory&cache=shared", "uri": True}, + connection_config={"database": "file:test_sessions_mem?mode=memory&cache=shared", "uri": True}, extension_config={"litestar": {"session_table": "test_sessions"}}, ) store = SQLiteStore(config) diff --git a/tests/integration/test_adapters/test_sqlite/test_migrations.py b/tests/integration/test_adapters/test_sqlite/test_migrations.py index 91046aaa9..4f5250761 100644 --- a/tests/integration/test_adapters/test_sqlite/test_migrations.py +++ b/tests/integration/test_adapters/test_sqlite/test_migrations.py @@ -16,7 +16,7 @@ def test_sqlite_migration_full_workflow(tmp_path: Path) -> None: migration_dir = tmp_path / "migrations" temp_db = str(tmp_path / "test.db") config = SqliteConfig( - 
pool_config={"database": temp_db}, + connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, ) commands: SyncMigrationCommands[Any] | AsyncMigrationCommands[Any] = create_migration_commands(config) @@ -74,7 +74,7 @@ def test_sqlite_multiple_migrations_workflow(tmp_path: Path) -> None: migration_dir = tmp_path / "migrations" temp_db = str(tmp_path / "test.db") config = SqliteConfig( - pool_config={"database": temp_db}, + connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, ) commands: SyncMigrationCommands[Any] | AsyncMigrationCommands[Any] = create_migration_commands(config) @@ -161,7 +161,7 @@ def test_sqlite_migration_current_command(tmp_path: Path) -> None: migration_dir = tmp_path / "migrations" temp_db = str(tmp_path / "test.db") config = SqliteConfig( - pool_config={"database": temp_db}, + connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, ) commands: SyncMigrationCommands[Any] | AsyncMigrationCommands[Any] = create_migration_commands(config) @@ -195,7 +195,7 @@ def test_sqlite_migration_error_handling(tmp_path: Path) -> None: migration_dir = tmp_path / "migrations" temp_db = str(tmp_path / "test.db") config = SqliteConfig( - pool_config={"database": temp_db}, + connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, ) commands: SyncMigrationCommands[Any] | AsyncMigrationCommands[Any] = create_migration_commands(config) @@ -233,7 +233,7 @@ def test_sqlite_migration_with_transactions(tmp_path: Path) -> None: migration_dir = tmp_path / "migrations" temp_db = str(tmp_path / "test.db") config = SqliteConfig( - pool_config={"database": temp_db}, + connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, ) commands: SyncMigrationCommands[Any] | AsyncMigrationCommands[Any] = create_migration_commands(config) @@ -283,7 +283,7 @@ def test_sqlite_config_migrate_up_method(tmp_path: Path) -> None: temp_db = str(tmp_path / "test.db") config = SqliteConfig( - pool_config={"database": temp_db}, + connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, ) @@ -323,7 +323,7 @@ def test_sqlite_config_migrate_down_method(tmp_path: Path) -> None: temp_db = str(tmp_path / "test.db") config = SqliteConfig( - pool_config={"database": temp_db}, + connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, ) @@ -368,7 +368,7 @@ def test_sqlite_config_get_current_migration_method(tmp_path: Path) -> None: temp_db = str(tmp_path / "test.db") config = SqliteConfig( - pool_config={"database": temp_db}, + connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, ) @@ -404,7 +404,7 @@ def test_sqlite_config_create_migration_method(tmp_path: Path) -> None: temp_db = str(tmp_path / "test.db") config = SqliteConfig( - pool_config={"database": temp_db}, + connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, ) @@ -425,7 +425,7 @@ def 
test_sqlite_config_stamp_migration_method(tmp_path: Path) -> None: temp_db = str(tmp_path / "test.db") config = SqliteConfig( - pool_config={"database": temp_db}, + connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, ) @@ -462,7 +462,7 @@ def test_sqlite_config_fix_migrations_dry_run(tmp_path: Path) -> None: temp_db = str(tmp_path / "test.db") config = SqliteConfig( - pool_config={"database": temp_db}, + connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir), "version_table_name": "sqlspec_migrations"}, ) diff --git a/tests/integration/test_adapters/test_sqlite/test_parameter_styles.py b/tests/integration/test_adapters/test_sqlite/test_parameter_styles.py index b7ce1f7db..ab43a94e0 100644 --- a/tests/integration/test_adapters/test_sqlite/test_parameter_styles.py +++ b/tests/integration/test_adapters/test_sqlite/test_parameter_styles.py @@ -209,7 +209,7 @@ def test_parameterized_query_patterns( def test_sqlite_none_parameters() -> None: """Test that None values in named parameters are handled correctly by SQLite.""" - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) with config.provide_session() as driver: # Create test table @@ -277,7 +277,7 @@ def test_sqlite_none_parameters() -> None: def test_sqlite_none_parameters_qmark_style() -> None: """Test None values with QMARK (?) parameter style - SQLite default.""" - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) with config.provide_session() as driver: # Create test table @@ -309,7 +309,7 @@ def test_sqlite_none_parameters_qmark_style() -> None: def test_sqlite_all_none_parameters() -> None: """Test when all parameter values are None.""" - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) with config.provide_session() as driver: # Create test table @@ -348,7 +348,7 @@ def test_sqlite_all_none_parameters() -> None: def test_sqlite_none_with_execute_many() -> None: """Test None values work correctly with execute_many.""" - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) with config.provide_session() as driver: # Create test table @@ -389,7 +389,7 @@ def test_sqlite_none_with_execute_many() -> None: def test_sqlite_none_in_where_clause() -> None: """Test None values in WHERE clauses work correctly.""" - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) with config.provide_session() as driver: # Create test table diff --git a/tests/integration/test_adapters/test_sqlite/test_pooling.py b/tests/integration/test_adapters/test_sqlite/test_pooling.py index 2fd6359e0..516b32c4c 100644 --- a/tests/integration/test_adapters/test_sqlite/test_pooling.py +++ b/tests/integration/test_adapters/test_sqlite/test_pooling.py @@ -15,8 +15,8 @@ def test_shared_memory_pooling(sqlite_config_shared_memory: SqliteConfig) -> Non """Test that shared memory databases allow pooling.""" config = sqlite_config_shared_memory - assert config.pool_config["pool_min_size"] == 2 - assert config.pool_config["pool_max_size"] == 5 + assert config.connection_config["pool_min_size"] == 2 + assert config.connection_config["pool_max_size"] == 5 with 
config.provide_session() as session1: session1.execute_script(""" @@ -42,8 +42,8 @@ def test_regular_memory_auto_conversion(sqlite_config_regular_memory: SqliteConf """Test that regular memory databases are auto-converted to shared memory with pooling enabled.""" config = sqlite_config_regular_memory - assert config.pool_config["pool_min_size"] == 5 - assert config.pool_config["pool_max_size"] == 10 + assert config.connection_config["pool_min_size"] == 5 + assert config.connection_config["pool_max_size"] == 10 db_uri = config._get_connection_config_dict()["database"] assert db_uri.startswith("file:memory_") and "cache=private" in db_uri @@ -73,8 +73,8 @@ def test_file_database_pooling_enabled(sqlite_temp_file_config: SqliteConfig) -> """Test that file-based databases allow pooling.""" config = sqlite_temp_file_config - assert config.pool_config["pool_min_size"] == 3 - assert config.pool_config["pool_max_size"] == 8 + assert config.connection_config["pool_min_size"] == 3 + assert config.connection_config["pool_max_size"] == 8 with config.provide_session() as session1: session1.execute_script(""" @@ -214,12 +214,12 @@ def test_pool_transaction_rollback(sqlite_config_shared_memory: SqliteConfig) -> def test_config_with_pool_config_parameter(tmp_path: Path) -> None: - """Test that SqliteConfig correctly accepts pool_config parameter.""" + """Test that SqliteConfig correctly accepts connection_config parameter.""" db_path = tmp_path / "test.sqlite" - pool_config = {"database": str(db_path), "timeout": 10.0, "check_same_thread": False} + connection_config = {"database": str(db_path), "timeout": 10.0, "check_same_thread": False} - config = SqliteConfig(pool_config=pool_config) + config = SqliteConfig(connection_config=connection_config) try: connection_config = config._get_connection_config_dict() @@ -242,12 +242,12 @@ def test_config_with_pool_config_parameter(tmp_path: Path) -> None: def test_config_memory_database_conversion() -> None: """Test that :memory: databases are converted to shared memory.""" - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) try: - db_uri = config.pool_config["database"] + db_uri = config.connection_config["database"] assert db_uri.startswith("file:memory_") and "cache=private" in db_uri - assert config.pool_config["uri"] is True + assert config.connection_config["uri"] is True with config.provide_session() as session: result = session.execute("SELECT 'memory_test' as test") @@ -264,9 +264,9 @@ def test_config_default_database() -> None: config = SqliteConfig() try: - db_uri = config.pool_config["database"] + db_uri = config.connection_config["database"] assert db_uri.startswith("file:memory_") and "cache=private" in db_uri - assert config.pool_config["uri"] is True + assert config.connection_config["uri"] is True with config.provide_session() as session: result = session.execute("SELECT 'default_test' as test") diff --git a/tests/integration/test_cli/test_sync_adapter_cli.py b/tests/integration/test_cli/test_sync_adapter_cli.py index 219a02e7d..b8c6bdf66 100644 --- a/tests/integration/test_cli/test_sync_adapter_cli.py +++ b/tests/integration/test_cli/test_sync_adapter_cli.py @@ -65,7 +65,7 @@ def test_sqlite_full_migration_workflow(temp_project_dir: Path, cleanup_test_mod def get_config(): return SqliteConfig( bind_key="sqlite_workflow_test", - pool_config={{"database": "{db_path}"}}, + connection_config={{"database": "{db_path}"}}, migration_config={{ "enabled": True, "script_location": 
"{migrations_dir}" @@ -124,7 +124,7 @@ def test_duckdb_full_migration_workflow(temp_project_dir: Path, cleanup_test_mod def get_config(): return DuckDBConfig( bind_key="duckdb_workflow_test", - pool_config={{"database": "{db_path}"}}, + connection_config={{"database": "{db_path}"}}, migration_config={{ "enabled": True, "script_location": "{migrations_dir}" @@ -165,7 +165,7 @@ def test_sqlite_upgrade_downgrade_cycle(temp_project_dir: Path, cleanup_test_mod def get_config(): return SqliteConfig( bind_key="sqlite_cycle_test", - pool_config={{"database": "{db_path}"}}, + connection_config={{"database": "{db_path}"}}, migration_config={{ "enabled": True, "script_location": "{migrations_dir}" @@ -211,7 +211,7 @@ def get_configs(): return [ SqliteConfig( bind_key="sqlite_multi", - pool_config={{"database": "{sqlite_db}"}}, + connection_config={{"database": "{sqlite_db}"}}, migration_config={{ "enabled": True, "script_location": "{sqlite_migrations}" @@ -219,7 +219,7 @@ def get_configs(): ), DuckDBConfig( bind_key="duckdb_multi", - pool_config={{"database": "{duckdb_db}"}}, + connection_config={{"database": "{duckdb_db}"}}, migration_config={{ "enabled": True, "script_location": "{duckdb_migrations}" @@ -263,7 +263,7 @@ def test_sqlite_stamp_command(temp_project_dir: Path, cleanup_test_modules: None def get_config(): return SqliteConfig( bind_key="sqlite_stamp_test", - pool_config={{"database": "{db_path}"}}, + connection_config={{"database": "{db_path}"}}, migration_config={{ "enabled": True, "script_location": "{migrations_dir}" diff --git a/tests/integration/test_config/conftest.py b/tests/integration/test_config/conftest.py new file mode 100644 index 000000000..b38add286 --- /dev/null +++ b/tests/integration/test_config/conftest.py @@ -0,0 +1,18 @@ +"""Fixtures for connection_config integration tests.""" + +from typing import Any + +import pytest +from pytest_databases.docker.postgres import PostgresService + + +@pytest.fixture(scope="function") +def asyncpg_connection_config(postgres_service: PostgresService) -> "dict[str, Any]": + """Base pool configuration for AsyncPG tests.""" + return { + "host": postgres_service.host, + "port": postgres_service.port, + "user": postgres_service.user, + "password": postgres_service.password, + "database": postgres_service.database, + } diff --git a/tests/integration/test_config/test_connection_instance_injection.py b/tests/integration/test_config/test_connection_instance_injection.py new file mode 100644 index 000000000..946444658 --- /dev/null +++ b/tests/integration/test_config/test_connection_instance_injection.py @@ -0,0 +1,347 @@ +"""Integration tests for connection_instance parameter injection. + +Tests that pre-created pool/connection instances can be injected via the +connection_instance parameter and work correctly with database operations. + +This validates that the standardized parameter naming works end-to-end +with real database connections. 
+""" + +from pathlib import Path +from unittest.mock import AsyncMock, MagicMock + +import pytest + +from sqlspec.adapters.aiosqlite.config import AiosqliteConfig +from sqlspec.adapters.asyncpg.config import AsyncpgConfig +from sqlspec.adapters.duckdb.config import DuckDBConfig +from sqlspec.adapters.sqlite.config import SqliteConfig + +pytestmark = pytest.mark.xdist_group("config") + + +@pytest.mark.asyncio +@pytest.mark.postgres +async def test_asyncpg_connection_instance_with_pre_created_pool(asyncpg_connection_config: dict) -> None: + """Test AsyncpgConfig with connection_instance using pre-created pool.""" + import asyncpg + + # Create pool manually + pool = await asyncpg.create_pool(**asyncpg_connection_config, min_size=1, max_size=2) + + try: + # Inject pool into config + config = AsyncpgConfig(connection_config=asyncpg_connection_config, connection_instance=pool) + + # Verify pool is used + assert config.connection_instance is pool + + # Test database operation + async with config.provide_session() as session: + result = await session.select_one("SELECT 1 as value") + assert result["value"] == 1 + finally: + await pool.close() + + +@pytest.mark.asyncio +@pytest.mark.postgres +async def test_asyncpg_connection_instance_bypasses_pool_creation(asyncpg_connection_config: dict) -> None: + """Test that connection_instance bypasses _create_pool logic.""" + import asyncpg + + # Create pool manually + pool = await asyncpg.create_pool(**asyncpg_connection_config, min_size=1, max_size=2) + + try: + # Config with connection_instance should not call _create_pool + config = AsyncpgConfig(connection_config=asyncpg_connection_config, connection_instance=pool) + + # Get pool - should return the injected one + retrieved_pool = await config.provide_pool() + assert retrieved_pool is pool + + # Verify it works + async with config.provide_session() as session: + result = await session.select_one("SELECT 2 as value") + assert result["value"] == 2 + finally: + await pool.close() + + +@pytest.mark.asyncio +async def test_aiosqlite_connection_instance_with_pre_created_pool(tmp_path: Path) -> None: + """Test AiosqliteConfig with connection_instance using pre-created pool.""" + from sqlspec.adapters.aiosqlite.pool import AiosqliteConnectionPool + + db_path = tmp_path / "test.db" + + # Create pool manually + pool = AiosqliteConnectionPool(connection_parameters={"database": str(db_path)}, pool_size=2) + + try: + # Inject pool into config + config = AiosqliteConfig(connection_config={"database": str(db_path)}, connection_instance=pool) + + # Verify pool is used + assert config.connection_instance is pool + + # Test database operation + async with config.provide_session() as session: + await session.execute("CREATE TABLE test (id INTEGER, value TEXT)") + await session.execute("INSERT INTO test VALUES (1, 'test')") + result = await session.select_one("SELECT value FROM test WHERE id = 1") + assert result["value"] == "test" + finally: + await pool.close() + + +def test_sqlite_connection_instance_with_pre_created_pool(tmp_path: Path) -> None: + """Test SqliteConfig with connection_instance using pre-created pool.""" + from sqlspec.adapters.sqlite.pool import SqliteConnectionPool + + db_path = tmp_path / "test.db" + + # Create pool manually + pool = SqliteConnectionPool(connection_parameters={"database": str(db_path)}, pool_min_size=1, pool_max_size=2) + + try: + # Inject pool into config + config = SqliteConfig(connection_config={"database": str(db_path)}, connection_instance=pool) + + # Verify pool is used + assert 
config.connection_instance is pool + + # Test database operation + with config.provide_session() as session: + session.execute("CREATE TABLE test (id INTEGER, value TEXT)") + session.execute("INSERT INTO test VALUES (1, 'test')") + result = session.select_one("SELECT value FROM test WHERE id = 1") + assert result["value"] == "test" + finally: + pool.close() + + +def test_duckdb_connection_instance_with_pre_created_pool() -> None: + """Test DuckDBConfig with connection_instance using pre-created pool.""" + from sqlspec.adapters.duckdb.pool import DuckDBConnectionPool + + # Create pool manually + pool = DuckDBConnectionPool(connection_config={"database": ":memory:"}) + + try: + # Inject pool into config + config = DuckDBConfig(connection_config={"database": ":memory:"}, connection_instance=pool) + + # Verify pool is used + assert config.connection_instance is pool + + # Test database operation + with config.provide_session() as session: + session.execute("CREATE TABLE test (id INTEGER, value VARCHAR)") + session.execute("INSERT INTO test VALUES (1, 'test')") + result = session.select_one("SELECT value FROM test WHERE id = 1") + assert result["value"] == "test" + finally: + pool.close() + + +def test_sqlite_connection_instance_none_creates_new_pool(tmp_path: Path) -> None: + """Test that connection_instance=None causes new pool creation.""" + db_path = tmp_path / "test.db" + + config = SqliteConfig( + connection_config={"database": str(db_path), "pool_min_size": 2, "pool_max_size": 5}, connection_instance=None + ) + + # Should create new pool + assert config.connection_instance is None + + # Using provide_pool should create pool + pool = config.provide_pool() + assert pool is not None + + # Verify it works + with config.provide_session() as session: + session.execute("CREATE TABLE test (id INTEGER)") + session.execute("INSERT INTO test VALUES (1)") + result = session.select_one("SELECT COUNT(*) as count FROM test") + assert result["count"] == 1 + + config.close_pool() + + +@pytest.mark.asyncio +async def test_aiosqlite_connection_instance_none_creates_new_pool(tmp_path: Path) -> None: + """Test that connection_instance=None causes new pool creation for async.""" + db_path = tmp_path / "test.db" + + config = AiosqliteConfig( + connection_config={"database": str(db_path), "pool_min_size": 2, "pool_max_size": 5}, connection_instance=None + ) + + # Should create new pool + assert config.connection_instance is None + + # Using provide_pool should create pool + pool = await config.provide_pool() + assert pool is not None + + # Verify it works + async with config.provide_session() as session: + await session.execute("CREATE TABLE test (id INTEGER)") + await session.execute("INSERT INTO test VALUES (1)") + result = await session.select_one("SELECT COUNT(*) as count FROM test") + assert result["count"] == 1 + + await config.close_pool() + + +def test_connection_instance_persists_across_sessions() -> None: + """Test that connection_instance persists across multiple sessions.""" + from sqlspec.adapters.duckdb.pool import DuckDBConnectionPool + + pool = DuckDBConnectionPool(connection_config={"database": ":memory:"}) + + try: + config = DuckDBConfig(connection_config={"database": ":memory:"}, connection_instance=pool) + + # First session + with config.provide_session() as session1: + session1.execute("CREATE TABLE test (id INTEGER)") + session1.execute("INSERT INTO test VALUES (1)") + + # Second session - should use same pool + with config.provide_session() as session2: + result = 
session2.select_one("SELECT COUNT(*) as count FROM test") + assert result["count"] == 1 + + # Verify connection_instance is still the same + assert config.connection_instance is pool + finally: + pool.close() + + +def test_connection_instance_with_empty_connection_config() -> None: + """Test that connection_instance works with empty connection_config.""" + from sqlspec.adapters.duckdb.pool import DuckDBConnectionPool + + pool = DuckDBConnectionPool(connection_config={"database": ":memory:"}) + + try: + # Empty connection_config, only connection_instance + config = DuckDBConfig(connection_config={}, connection_instance=pool) + + assert config.connection_instance is pool + # DuckDB adds default database parameter + assert "database" in config.connection_config + + # Should still work + with config.provide_session() as session: + result = session.select_one("SELECT 1 as value") + assert result["value"] == 1 + finally: + pool.close() + + +@pytest.mark.asyncio +@pytest.mark.postgres +async def test_asyncpg_connection_instance_overrides_connection_config_pool_params( + asyncpg_connection_config: dict, +) -> None: + """Test that connection_instance overrides pool parameters in connection_config.""" + import asyncpg + + # Create pool with specific size + pool = await asyncpg.create_pool(**asyncpg_connection_config, min_size=3, max_size=5) + + try: + # Config has different pool params but connection_instance should take precedence + merged_config = dict(asyncpg_connection_config) + merged_config["min_size"] = 10 # This should be ignored + merged_config["max_size"] = 20 # This should be ignored + config = AsyncpgConfig(connection_config=merged_config, connection_instance=pool) + + # The injected pool should be used, not a new one with config params + retrieved_pool = await config.provide_pool() + assert retrieved_pool is pool + + # Verify pool has original size, not config size + # (We can't directly check min/max_size, but we can verify it's the same pool object) + async with config.provide_session() as session: + result = await session.select_one("SELECT 1 as value") + assert result["value"] == 1 + finally: + await pool.close() + + +def test_connection_instance_manual_close() -> None: + """Test that manually created connection_instance can be closed independently.""" + from sqlspec.adapters.duckdb.pool import DuckDBConnectionPool + + pool = DuckDBConnectionPool(connection_config={"database": ":memory:"}) + + config = DuckDBConfig(connection_config={"database": ":memory:"}, connection_instance=pool) + + # Use the config + with config.provide_session() as session: + session.execute("CREATE TABLE test (id INTEGER)") + + # Close the pool manually (not via config.close_pool()) + pool.close() + + # Config's connection_instance is now closed + # Attempting to use should fail or create new pool depending on implementation + assert config.connection_instance is pool + + +def test_sqlite_connection_instance_after_close_pool() -> None: + """Test that connection_instance is set to None after close_pool().""" + from sqlspec.adapters.sqlite.pool import SqliteConnectionPool + + pool = SqliteConnectionPool(connection_parameters={"database": ":memory:"}, pool_min_size=1, pool_max_size=2) + + config = SqliteConfig(connection_config={"database": ":memory:"}, connection_instance=pool) + + # Close the pool via config + config.close_pool() + + # connection_instance should be set to None + assert config.connection_instance is None + + +@pytest.mark.asyncio +async def test_aiosqlite_connection_instance_after_close_pool() 
-> None: + """Test that connection_instance can be closed via config.""" + from sqlspec.adapters.aiosqlite.pool import AiosqliteConnectionPool + + pool = AiosqliteConnectionPool(connection_parameters={"database": ":memory:"}, pool_size=2) + + config = AiosqliteConfig(connection_config={"database": ":memory:"}, connection_instance=pool) + + # Close the pool via config + await config.close_pool() + + # Verify pool is closed + assert pool.is_closed + + +def test_connection_instance_with_mock_pool() -> None: + """Test that connection_instance accepts mock pools for testing.""" + mock_pool = MagicMock() + mock_pool.acquire = MagicMock() + + config = DuckDBConfig(connection_config={"database": ":memory:"}, connection_instance=mock_pool) + + assert config.connection_instance is mock_pool + + +@pytest.mark.asyncio +async def test_connection_instance_with_async_mock_pool() -> None: + """Test that connection_instance accepts async mock pools for testing.""" + mock_pool = MagicMock() + mock_pool.acquire = AsyncMock() + + config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test"}, connection_instance=mock_pool) + + assert config.connection_instance is mock_pool diff --git a/tests/integration/test_dishka/conftest.py b/tests/integration/test_dishka/conftest.py index 241b9ca53..44c9dcd11 100644 --- a/tests/integration/test_dishka/conftest.py +++ b/tests/integration/test_dishka/conftest.py @@ -22,7 +22,7 @@ def simple_sqlite_provider() -> "Provider": class DatabaseProvider(Provider): # type: ignore[misc] @provide(scope=Scope.APP) # type: ignore[misc] def get_database_config(self) -> SqliteConfig: - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) config.bind_key = "dishka_sqlite" return config @@ -43,7 +43,7 @@ class AsyncDatabaseProvider(Provider): # type: ignore[misc] async def get_database_config(self) -> SqliteConfig: # Simulate some async work (e.g., fetching config from remote service) await asyncio.sleep(0.001) - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) config.bind_key = "async_dishka_sqlite" return config @@ -61,14 +61,14 @@ def multi_config_provider() -> "Provider": class MultiDatabaseProvider(Provider): # type: ignore[misc] @provide(scope=Scope.APP) # type: ignore[misc] def get_sqlite_config(self) -> SqliteConfig: - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) config.bind_key = "dishka_multi_sqlite" config.migration_config = {"enabled": True, "script_location": "sqlite_migrations"} return config @provide(scope=Scope.APP) # type: ignore[misc] def get_duckdb_config(self) -> DuckDBConfig: - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) config.bind_key = "dishka_multi_duckdb" config.migration_config = {"enabled": True, "script_location": "duckdb_migrations"} return config @@ -91,7 +91,7 @@ class AsyncMultiDatabaseProvider(Provider): # type: ignore[misc] @provide(scope=Scope.APP) # type: ignore[misc] async def get_sqlite_config(self) -> SqliteConfig: await asyncio.sleep(0.001) - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) config.bind_key = "async_multi_sqlite" config.migration_config = {"enabled": True} return config @@ -99,7 +99,7 @@ async def get_sqlite_config(self) -> 
SqliteConfig: @provide(scope=Scope.APP) # type: ignore[misc] async def get_aiosqlite_config(self) -> AiosqliteConfig: await asyncio.sleep(0.001) - config = AiosqliteConfig(pool_config={"database": ":memory:"}) + config = AiosqliteConfig(connection_config={"database": ":memory:"}) config.bind_key = "async_multi_aiosqlite" config.migration_config = {"enabled": True} return config @@ -107,7 +107,7 @@ async def get_aiosqlite_config(self) -> AiosqliteConfig: @provide(scope=Scope.APP) # type: ignore[misc] async def get_duckdb_config(self) -> DuckDBConfig: await asyncio.sleep(0.001) - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) config.bind_key = "async_multi_duckdb" config.migration_config = {"enabled": True} return config diff --git a/tests/integration/test_dishka/test_dishka_integration.py b/tests/integration/test_dishka/test_dishka_integration.py index d4be70f0b..222587d12 100644 --- a/tests/integration/test_dishka/test_dishka_integration.py +++ b/tests/integration/test_dishka/test_dishka_integration.py @@ -33,7 +33,7 @@ class DatabaseProvider(Provider): @provide(scope=Scope.APP) def get_database_config(self) -> SqliteConfig: return SqliteConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={"enabled": True, "script_location": "migrations"}, bind_key="dishka_sqlite" ) @@ -70,7 +70,7 @@ async def get_database_config(self) -> SqliteConfig: # Simulate some async work await asyncio.sleep(0.001) return SqliteConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={"enabled": True, "script_location": "migrations"}, bind_key="async_dishka_sqlite" ) @@ -107,7 +107,7 @@ class MultiDatabaseProvider(Provider): @provide(scope=Scope.APP) def get_sqlite_config(self) -> SqliteConfig: return SqliteConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={"enabled": True, "script_location": "sqlite_migrations"}, bind_key="dishka_multi_sqlite" ) @@ -115,7 +115,7 @@ def get_sqlite_config(self) -> SqliteConfig: @provide(scope=Scope.APP) def get_duckdb_config(self) -> DuckDBConfig: return DuckDBConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={"enabled": True, "script_location": "duckdb_migrations"}, bind_key="dishka_multi_duckdb" ) @@ -158,7 +158,7 @@ class AsyncMultiDatabaseProvider(Provider): async def get_sqlite_config(self) -> SqliteConfig: await asyncio.sleep(0.001) return SqliteConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={"enabled": True}, bind_key="async_multi_sqlite" ) @@ -167,7 +167,7 @@ async def get_sqlite_config(self) -> SqliteConfig: async def get_aiosqlite_config(self) -> AiosqliteConfig: await asyncio.sleep(0.001) return AiosqliteConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={"enabled": True}, bind_key="async_multi_aiosqlite" ) @@ -176,7 +176,7 @@ async def get_aiosqlite_config(self) -> AiosqliteConfig: async def get_duckdb_config(self) -> DuckDBConfig: await asyncio.sleep(0.001) return DuckDBConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={"enabled": True}, bind_key="async_multi_duckdb" ) @@ -227,7 +227,7 @@ class DatabaseProvider(Provider): @provide(scope=Scope.APP) def get_database_config(self, 
database_url: str, bind_key: str) -> SqliteConfig: return SqliteConfig( - pool_config={"database": database_url}, + connection_config={"database": database_url}, migration_config={"enabled": True, "script_location": "complex_migrations"}, bind_key=bind_key ) @@ -296,7 +296,7 @@ class MigrationProvider(Provider): async def get_database_config(self) -> SqliteConfig: await asyncio.sleep(0.001) return SqliteConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={ "enabled": True, "script_location": "dishka_migrations" @@ -338,7 +338,7 @@ class ValidatedProvider(Provider): async def get_database_config(self) -> DuckDBConfig: await asyncio.sleep(0.001) return DuckDBConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={"enabled": True}, bind_key="validated_dishka" ) @@ -387,7 +387,7 @@ async def get_primary_db_config(self) -> SqliteConfig: # Simulate loading config from environment or remote service await asyncio.sleep(0.002) # Simulate I/O return SqliteConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={ "enabled": True, "script_location": "migrations/primary" @@ -399,7 +399,7 @@ async def get_primary_db_config(self) -> SqliteConfig: async def get_analytics_db_config(self) -> DuckDBConfig: await asyncio.sleep(0.002) return DuckDBConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={ "enabled": True, "script_location": "migrations/analytics" @@ -446,7 +446,7 @@ class CleanupProvider(Provider): async def get_database_config(self) -> SqliteConfig: await asyncio.sleep(0.001) return SqliteConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={"enabled": True}, bind_key="cleanup_test" ) diff --git a/tests/integration/test_extensions/test_fastapi/test_fastapi_filters_integration.py b/tests/integration/test_extensions/test_fastapi/test_fastapi_filters_integration.py index 1a387567e..a3c3baf97 100644 --- a/tests/integration/test_extensions/test_fastapi/test_fastapi_filters_integration.py +++ b/tests/integration/test_extensions/test_fastapi/test_fastapi_filters_integration.py @@ -29,7 +29,7 @@ def test_fastapi_id_filter_dependency() -> None: """Test ID filter dependency with actual HTTP request.""" sqlspec = SQLSpec() config = AiosqliteConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, extension_config={"starlette": {"commit_mode": "manual", "session_key": "db"}}, ) sqlspec.add_config(config) @@ -63,7 +63,7 @@ def test_fastapi_search_filter_dependency() -> None: """Test search filter dependency with actual HTTP request.""" sqlspec = SQLSpec() config = AiosqliteConfig( - pool_config={"database": ":memory:"}, extension_config={"starlette": {"commit_mode": "manual"}} + connection_config={"database": ":memory:"}, extension_config={"starlette": {"commit_mode": "manual"}} ) sqlspec.add_config(config) @@ -102,7 +102,7 @@ def test_fastapi_pagination_filter_dependency() -> None: """Test pagination filter dependency with actual HTTP request.""" sqlspec = SQLSpec() config = AiosqliteConfig( - pool_config={"database": ":memory:"}, extension_config={"starlette": {"commit_mode": "manual"}} + connection_config={"database": ":memory:"}, extension_config={"starlette": {"commit_mode": "manual"}} ) sqlspec.add_config(config) @@ -144,7 +144,7 @@ def test_fastapi_order_by_filter_dependency() -> None: 
"""Test order by filter dependency with actual HTTP request.""" sqlspec = SQLSpec() config = AiosqliteConfig( - pool_config={"database": ":memory:"}, extension_config={"starlette": {"commit_mode": "manual"}} + connection_config={"database": ":memory:"}, extension_config={"starlette": {"commit_mode": "manual"}} ) sqlspec.add_config(config) @@ -186,7 +186,7 @@ def test_fastapi_date_range_filter_dependency() -> None: """Test date range filter dependency with actual HTTP request.""" sqlspec = SQLSpec() config = AiosqliteConfig( - pool_config={"database": ":memory:"}, extension_config={"starlette": {"commit_mode": "manual"}} + connection_config={"database": ":memory:"}, extension_config={"starlette": {"commit_mode": "manual"}} ) sqlspec.add_config(config) @@ -239,7 +239,7 @@ def test_fastapi_multiple_filters_combined() -> None: """Test combining multiple filter types in one request.""" sqlspec = SQLSpec() config = AiosqliteConfig( - pool_config={"database": ":memory:"}, extension_config={"starlette": {"commit_mode": "manual"}} + connection_config={"database": ":memory:"}, extension_config={"starlette": {"commit_mode": "manual"}} ) sqlspec.add_config(config) @@ -285,7 +285,7 @@ def test_fastapi_filter_with_actual_query_execution() -> None: """Test filters applied to actual SQL query execution.""" sqlspec = SQLSpec() config = AiosqliteConfig( - pool_config={"database": ":memory:"}, extension_config={"starlette": {"commit_mode": "autocommit"}} + connection_config={"database": ":memory:"}, extension_config={"starlette": {"commit_mode": "autocommit"}} ) sqlspec.add_config(config) @@ -338,7 +338,7 @@ def test_fastapi_openapi_schema_includes_filter_params() -> None: """Test that OpenAPI schema includes filter query parameters.""" sqlspec = SQLSpec() config = AiosqliteConfig( - pool_config={"database": ":memory:"}, extension_config={"starlette": {"commit_mode": "manual"}} + connection_config={"database": ":memory:"}, extension_config={"starlette": {"commit_mode": "manual"}} ) sqlspec.add_config(config) @@ -372,7 +372,7 @@ def test_fastapi_filter_validation_error() -> None: """Test that invalid filter values return proper validation errors.""" sqlspec = SQLSpec() config = AiosqliteConfig( - pool_config={"database": ":memory:"}, extension_config={"starlette": {"commit_mode": "manual"}} + connection_config={"database": ":memory:"}, extension_config={"starlette": {"commit_mode": "manual"}} ) sqlspec.add_config(config) diff --git a/tests/integration/test_extensions/test_fastapi/test_fastapi_integration.py b/tests/integration/test_extensions/test_fastapi/test_fastapi_integration.py index 5b03dc24c..5c1ab94ec 100644 --- a/tests/integration/test_extensions/test_fastapi/test_fastapi_integration.py +++ b/tests/integration/test_extensions/test_fastapi/test_fastapi_integration.py @@ -19,7 +19,7 @@ def test_fastapi_dependency_injection() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, + connection_config={"database": tmp.name}, extension_config={"starlette": {"commit_mode": "manual", "session_key": "db"}}, ) sql.add_config(config) @@ -45,7 +45,7 @@ def test_fastapi_connection_dependency() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, + connection_config={"database": tmp.name}, extension_config={"starlette": {"commit_mode": "manual", "session_key": "db"}}, ) sql.add_config(config) @@ -74,7 +74,7 @@ 
def test_fastapi_manual_commit() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, + connection_config={"database": tmp.name}, extension_config={"starlette": {"commit_mode": "manual", "session_key": "db"}}, ) sql.add_config(config) @@ -113,7 +113,7 @@ def test_fastapi_autocommit_mode() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, + connection_config={"database": tmp.name}, extension_config={"starlette": {"commit_mode": "autocommit", "session_key": "db"}}, ) sql.add_config(config) @@ -149,7 +149,7 @@ def test_fastapi_session_caching_across_dependencies() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, + connection_config={"database": tmp.name}, extension_config={"starlette": {"commit_mode": "manual", "session_key": "db"}}, ) sql.add_config(config) @@ -175,7 +175,7 @@ def test_fastapi_complex_route_with_multiple_queries() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, + connection_config={"database": tmp.name}, extension_config={"starlette": {"commit_mode": "manual", "session_key": "db"}}, ) sql.add_config(config) @@ -237,7 +237,7 @@ def test_fastapi_inherits_starlette_behavior() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, + connection_config={"database": tmp.name}, extension_config={"starlette": {"commit_mode": "manual", "session_key": "db"}}, ) sql.add_config(config) @@ -265,7 +265,7 @@ def test_fastapi_default_session_key() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() - config = AiosqliteConfig(pool_config={"database": tmp.name}, extension_config={"starlette": {}}) + config = AiosqliteConfig(connection_config={"database": tmp.name}, extension_config={"starlette": {}}) sql.add_config(config) app = FastAPI() diff --git a/tests/integration/test_extensions/test_flask/test_flask_disable_di.py b/tests/integration/test_extensions/test_flask/test_flask_disable_di.py index 829c699d8..d727a8d93 100644 --- a/tests/integration/test_extensions/test_flask/test_flask_disable_di.py +++ b/tests/integration/test_extensions/test_flask/test_flask_disable_di.py @@ -16,7 +16,9 @@ def test_flask_disable_di_disables_hooks() -> None: """Test that disable_di disables request hooks in Flask extension.""" with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() - config = SqliteConfig(pool_config={"database": tmp.name}, extension_config={"flask": {"disable_di": True}}) + config = SqliteConfig( + connection_config={"database": tmp.name}, extension_config={"flask": {"disable_di": True}} + ) sql.add_config(config) app = Flask(__name__) @@ -55,7 +57,9 @@ def test_flask_default_di_enabled() -> None: """Test that default behavior has disable_di=False.""" with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() - config = SqliteConfig(pool_config={"database": tmp.name}, extension_config={"flask": {"session_key": "db"}}) + config = SqliteConfig( + connection_config={"database": tmp.name}, extension_config={"flask": {"session_key": "db"}} + ) sql.add_config(config) app = 
Flask(__name__) diff --git a/tests/integration/test_extensions/test_flask/test_flask_integration.py b/tests/integration/test_extensions/test_flask/test_flask_integration.py index d280c969b..da2755811 100644 --- a/tests/integration/test_extensions/test_flask/test_flask_integration.py +++ b/tests/integration/test_extensions/test_flask/test_flask_integration.py @@ -16,7 +16,8 @@ def test_flask_manual_mode_sync_sqlite() -> None: """Test Flask extension with manual commit mode and sync SQLite.""" sqlspec = SQLSpec() config = SqliteConfig( - pool_config={"database": ":memory:"}, extension_config={"flask": {"commit_mode": "manual", "session_key": "db"}} + connection_config={"database": ":memory:"}, + extension_config={"flask": {"commit_mode": "manual", "session_key": "db"}}, ) sqlspec.add_config(config) @@ -56,7 +57,7 @@ def test_flask_autocommit_mode_sync_sqlite() -> None: """Test Flask extension with autocommit mode and sync SQLite.""" sqlspec = SQLSpec() config = SqliteConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, extension_config={"flask": {"commit_mode": "autocommit", "session_key": "db"}}, ) sqlspec.add_config(config) @@ -94,7 +95,7 @@ def test_flask_autocommit_rollback_on_error() -> None: """Test Flask extension autocommit rolls back on error status.""" sqlspec = SQLSpec() config = SqliteConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, extension_config={"flask": {"commit_mode": "autocommit", "session_key": "db"}}, ) sqlspec.add_config(config) @@ -136,7 +137,7 @@ def test_flask_autocommit_include_redirect() -> None: """Test Flask extension autocommit_include_redirect commits on 3xx.""" sqlspec = SQLSpec() config = SqliteConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, extension_config={"flask": {"commit_mode": "autocommit_include_redirect", "session_key": "db"}}, ) sqlspec.add_config(config) @@ -178,12 +179,12 @@ def test_flask_multi_database() -> None: sqlspec = SQLSpec() sqlite_config = SqliteConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, extension_config={"flask": {"commit_mode": "autocommit", "session_key": "sqlite_db"}}, ) duckdb_config = DuckDBConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, extension_config={"flask": {"commit_mode": "autocommit", "session_key": "duckdb_db"}}, ) @@ -229,7 +230,8 @@ def test_flask_session_caching() -> None: """Test that sessions are cached per request.""" sqlspec = SQLSpec() config = SqliteConfig( - pool_config={"database": ":memory:"}, extension_config={"flask": {"commit_mode": "manual", "session_key": "db"}} + connection_config={"database": ":memory:"}, + extension_config={"flask": {"commit_mode": "manual", "session_key": "db"}}, ) sqlspec.add_config(config) @@ -252,7 +254,7 @@ def test_flask_default_session_key() -> None: """Test default session key resolves to 'db_session'.""" sqlspec = SQLSpec() - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) sqlspec.add_config(config) app = Flask(__name__) @@ -281,7 +283,7 @@ def test_flask_async_adapter_via_portal() -> None: sqlspec = SQLSpec() config = AiosqliteConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, extension_config={"flask": {"commit_mode": "autocommit", "session_key": "db"}}, ) sqlspec.add_config(config) diff --git 
a/tests/integration/test_extensions/test_litestar/test_correlation_middleware.py b/tests/integration/test_extensions/test_litestar/test_correlation_middleware.py index c5c7b59b4..02a8c3ffe 100644 --- a/tests/integration/test_extensions/test_litestar/test_correlation_middleware.py +++ b/tests/integration/test_extensions/test_litestar/test_correlation_middleware.py @@ -32,7 +32,7 @@ def _build_app( litestar_settings["auto_trace_headers"] = auto_trace_headers spec = SQLSpec() - spec.add_config(SqliteConfig(pool_config={"database": ":memory:"}, extension_config=extension_config)) + spec.add_config(SqliteConfig(connection_config={"database": ":memory:"}, extension_config=extension_config)) return Litestar(route_handlers=[correlation_handler], plugins=[SQLSpecPlugin(sqlspec=spec)]) diff --git a/tests/integration/test_extensions/test_litestar/test_litestar_disable_di.py b/tests/integration/test_extensions/test_litestar/test_litestar_disable_di.py index c8d2968e7..5eaf8b524 100644 --- a/tests/integration/test_extensions/test_litestar/test_litestar_disable_di.py +++ b/tests/integration/test_extensions/test_litestar/test_litestar_disable_di.py @@ -18,7 +18,7 @@ def test_litestar_disable_di_disables_providers() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, extension_config={"litestar": {"disable_di": True}} + connection_config={"database": tmp.name}, extension_config={"litestar": {"disable_di": True}} ) sql.add_config(config) plugin = SQLSpecPlugin(sqlspec=sql) @@ -49,7 +49,7 @@ def test_litestar_default_di_enabled() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, extension_config={"litestar": {"session_key": "db"}} + connection_config={"database": tmp.name}, extension_config={"litestar": {"session_key": "db"}} ) sql.add_config(config) plugin = SQLSpecPlugin(sqlspec=sql) diff --git a/tests/integration/test_extensions/test_starlette/test_starlette_disable_di.py b/tests/integration/test_extensions/test_starlette/test_starlette_disable_di.py index fe21bfab6..ddc6e941a 100644 --- a/tests/integration/test_extensions/test_starlette/test_starlette_disable_di.py +++ b/tests/integration/test_extensions/test_starlette/test_starlette_disable_di.py @@ -21,7 +21,7 @@ def test_starlette_disable_di_disables_middleware() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, extension_config={"starlette": {"disable_di": True}} + connection_config={"database": tmp.name}, extension_config={"starlette": {"disable_di": True}} ) sql.add_config(config) db_ext = SQLSpecPlugin(sql) @@ -50,7 +50,7 @@ def test_starlette_default_di_enabled() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, extension_config={"starlette": {"session_key": "db"}} + connection_config={"database": tmp.name}, extension_config={"starlette": {"session_key": "db"}} ) sql.add_config(config) db_ext = SQLSpecPlugin(sql) diff --git a/tests/integration/test_extensions/test_starlette/test_starlette_integration.py b/tests/integration/test_extensions/test_starlette/test_starlette_integration.py index e4efa4e38..03c5a0e47 100644 --- a/tests/integration/test_extensions/test_starlette/test_starlette_integration.py +++ 
b/tests/integration/test_extensions/test_starlette/test_starlette_integration.py @@ -21,7 +21,7 @@ def test_starlette_basic_query() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, + connection_config={"database": tmp.name}, extension_config={"starlette": {"commit_mode": "manual", "session_key": "db"}}, ) sql.add_config(config) @@ -47,7 +47,7 @@ def test_starlette_manual_commit_mode() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, + connection_config={"database": tmp.name}, extension_config={"starlette": {"commit_mode": "manual", "session_key": "db"}}, ) sql.add_config(config) @@ -84,7 +84,7 @@ def test_starlette_autocommit_mode() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, + connection_config={"database": tmp.name}, extension_config={"starlette": {"commit_mode": "autocommit", "session_key": "db"}}, ) sql.add_config(config) @@ -119,7 +119,7 @@ def test_starlette_autocommit_rolls_back_on_error() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, + connection_config={"database": tmp.name}, extension_config={"starlette": {"commit_mode": "autocommit", "session_key": "db"}}, ) sql.add_config(config) @@ -157,7 +157,7 @@ def test_starlette_session_caching() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, + connection_config={"database": tmp.name}, extension_config={"starlette": {"commit_mode": "manual", "session_key": "db"}}, ) sql.add_config(config) @@ -182,7 +182,7 @@ def test_starlette_connection_pool_lifecycle() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() config = AiosqliteConfig( - pool_config={"database": tmp.name}, + connection_config={"database": tmp.name}, extension_config={"starlette": {"commit_mode": "manual", "session_key": "db"}}, ) sql.add_config(config) @@ -207,7 +207,7 @@ def test_starlette_default_session_key() -> None: with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp: sql = SQLSpec() - config = AiosqliteConfig(pool_config={"database": tmp.name}, extension_config={"starlette": {}}) + config = AiosqliteConfig(connection_config={"database": tmp.name}, extension_config={"starlette": {}}) sql.add_config(config) db_ext = SQLSpecPlugin(sql) diff --git a/tests/integration/test_migrations/test_auto_sync.py b/tests/integration/test_migrations/test_auto_sync.py index e2b1f033f..627d34934 100644 --- a/tests/integration/test_migrations/test_auto_sync.py +++ b/tests/integration/test_migrations/test_auto_sync.py @@ -18,7 +18,7 @@ def sqlite_config(tmp_path: Path) -> Generator[SqliteConfig, None, None]: migrations_dir.mkdir() config = SqliteConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={ "script_location": str(migrations_dir), "version_table_name": "ddl_migrations", diff --git a/tests/integration/test_migrations/test_fix_checksum_stability.py b/tests/integration/test_migrations/test_fix_checksum_stability.py index 00ce4a813..0ebe8c069 100644 --- a/tests/integration/test_migrations/test_fix_checksum_stability.py +++ 
b/tests/integration/test_migrations/test_fix_checksum_stability.py @@ -15,7 +15,7 @@ @pytest.fixture def sqlite_config() -> Generator[SqliteConfig, None, None]: """Create SQLite config for migration testing.""" - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) yield config config.close_pool() diff --git a/tests/integration/test_migrations/test_fix_idempotency_workflow.py b/tests/integration/test_migrations/test_fix_idempotency_workflow.py index 2fa5a85ee..3c801af1e 100644 --- a/tests/integration/test_migrations/test_fix_idempotency_workflow.py +++ b/tests/integration/test_migrations/test_fix_idempotency_workflow.py @@ -15,7 +15,7 @@ @pytest.fixture def sqlite_config() -> Generator[SqliteConfig, None, None]: """Create SQLite config for migration testing.""" - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) yield config config.close_pool() diff --git a/tests/integration/test_migrations/test_schema_migration.py b/tests/integration/test_migrations/test_schema_migration.py index be4999482..c725fe6eb 100644 --- a/tests/integration/test_migrations/test_schema_migration.py +++ b/tests/integration/test_migrations/test_schema_migration.py @@ -11,7 +11,7 @@ @pytest.fixture def sqlite_config() -> Generator[SqliteConfig, None, None]: """Create SQLite config for testing.""" - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) yield config config.close_pool() diff --git a/tests/integration/test_migrations/test_upgrade_downgrade_versions.py b/tests/integration/test_migrations/test_upgrade_downgrade_versions.py index 5107b72b0..84eec116e 100644 --- a/tests/integration/test_migrations/test_upgrade_downgrade_versions.py +++ b/tests/integration/test_migrations/test_upgrade_downgrade_versions.py @@ -18,7 +18,7 @@ def sqlite_config(tmp_path: Path) -> Generator[SqliteConfig, None, None]: migrations_dir.mkdir() config = SqliteConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={"script_location": str(migrations_dir), "version_table_name": "ddl_migrations"}, ) yield config diff --git a/tests/integration/test_stack_edge_cases.py b/tests/integration/test_stack_edge_cases.py index 79bafc0c6..6b41c9817 100644 --- a/tests/integration/test_stack_edge_cases.py +++ b/tests/integration/test_stack_edge_cases.py @@ -13,7 +13,7 @@ @pytest.fixture() def sqlite_stack_session() -> "Generator[SqliteDriver, None, None]": - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) with config.provide_session() as session: session.execute_script( """ diff --git a/tests/unit/test_adapters/test_asyncpg/test_cloud_connectors.py b/tests/unit/test_adapters/test_asyncpg/test_cloud_connectors.py index c6b042047..b9109ae4f 100644 --- a/tests/unit/test_adapters/test_asyncpg/test_cloud_connectors.py +++ b/tests/unit/test_adapters/test_asyncpg/test_cloud_connectors.py @@ -53,13 +53,13 @@ def mock_alloydb_module(): def test_cloud_sql_defaults_to_false() -> None: """Cloud SQL connector should always default to False (explicit opt-in required).""" - config = AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/test"}) + config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test"}) assert config.driver_features["enable_cloud_sql"] is False def 
test_alloydb_defaults_to_false() -> None: """AlloyDB connector should always default to False (explicit opt-in required).""" - config = AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/test"}) + config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test"}) assert config.driver_features["enable_alloydb"] is False @@ -69,7 +69,7 @@ def test_mutual_exclusion_both_enabled_raises_error() -> None: ImproperConfigurationError, match="Cannot enable both Cloud SQL and AlloyDB connectors simultaneously" ): AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/test"}, + connection_config={"dsn": "postgresql://localhost/test"}, driver_features={ "enable_cloud_sql": True, "cloud_sql_instance": "project:region:instance", @@ -83,7 +83,7 @@ def test_cloud_sql_missing_package_raises_error() -> None: """Enabling Cloud SQL without package installed should raise error.""" with pytest.raises(ImproperConfigurationError, match="cloud-sql-python-connector package not installed"): AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/test"}, + connection_config={"dsn": "postgresql://localhost/test"}, driver_features={"enable_cloud_sql": True, "cloud_sql_instance": "project:region:instance"}, ) @@ -93,7 +93,7 @@ def test_alloydb_missing_package_raises_error() -> None: with patch("sqlspec.adapters.asyncpg.config.ALLOYDB_CONNECTOR_INSTALLED", False): with pytest.raises(ImproperConfigurationError, match="cloud-alloydb-python-connector package not installed"): AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/test"}, + connection_config={"dsn": "postgresql://localhost/test"}, driver_features={ "enable_alloydb": True, "alloydb_instance_uri": "projects/p/locations/r/clusters/c/instances/i", @@ -108,7 +108,7 @@ def test_cloud_sql_missing_instance_raises_error() -> None: ImproperConfigurationError, match="cloud_sql_instance required when enable_cloud_sql is True" ): AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/test"}, driver_features={"enable_cloud_sql": True} + connection_config={"dsn": "postgresql://localhost/test"}, driver_features={"enable_cloud_sql": True} ) @@ -118,7 +118,9 @@ def test_alloydb_missing_instance_uri_raises_error() -> None: with pytest.raises( ImproperConfigurationError, match="alloydb_instance_uri required when enable_alloydb is True" ): - AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/test"}, driver_features={"enable_alloydb": True}) + AsyncpgConfig( + connection_config={"dsn": "postgresql://localhost/test"}, driver_features={"enable_alloydb": True} + ) def test_cloud_sql_invalid_instance_format_raises_error() -> None: @@ -126,7 +128,7 @@ def test_cloud_sql_invalid_instance_format_raises_error() -> None: with patch("sqlspec.adapters.asyncpg.config.CLOUD_SQL_CONNECTOR_INSTALLED", True): with pytest.raises(ImproperConfigurationError, match="Invalid Cloud SQL instance format"): AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/test"}, + connection_config={"dsn": "postgresql://localhost/test"}, driver_features={"enable_cloud_sql": True, "cloud_sql_instance": "invalid-format"}, ) @@ -136,7 +138,7 @@ def test_cloud_sql_instance_format_too_many_colons() -> None: with patch("sqlspec.adapters.asyncpg.config.CLOUD_SQL_CONNECTOR_INSTALLED", True): with pytest.raises(ImproperConfigurationError, match="Invalid Cloud SQL instance format"): AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/test"}, + connection_config={"dsn": "postgresql://localhost/test"}, driver_features={"enable_cloud_sql": True, 
"cloud_sql_instance": "project:region:instance:extra"}, ) @@ -146,7 +148,7 @@ def test_alloydb_invalid_instance_uri_format_raises_error() -> None: with patch("sqlspec.adapters.asyncpg.config.ALLOYDB_CONNECTOR_INSTALLED", True): with pytest.raises(ImproperConfigurationError, match="Invalid AlloyDB instance URI format"): AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/test"}, + connection_config={"dsn": "postgresql://localhost/test"}, driver_features={"enable_alloydb": True, "alloydb_instance_uri": "invalid-format"}, ) @@ -155,7 +157,7 @@ def test_cloud_sql_explicit_disable() -> None: """Explicitly disabling Cloud SQL should work even when package installed.""" with patch("sqlspec.adapters.asyncpg.config.CLOUD_SQL_CONNECTOR_INSTALLED", True): config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/test"}, driver_features={"enable_cloud_sql": False} + connection_config={"dsn": "postgresql://localhost/test"}, driver_features={"enable_cloud_sql": False} ) assert config.driver_features["enable_cloud_sql"] is False @@ -164,14 +166,14 @@ def test_alloydb_explicit_disable() -> None: """Explicitly disabling AlloyDB should work even when package installed.""" with patch("sqlspec.adapters.asyncpg.config.ALLOYDB_CONNECTOR_INSTALLED", True): config = AsyncpgConfig( - pool_config={"dsn": "postgresql://localhost/test"}, driver_features={"enable_alloydb": False} + connection_config={"dsn": "postgresql://localhost/test"}, driver_features={"enable_alloydb": False} ) assert config.driver_features["enable_alloydb"] is False def test_normal_config_without_connectors() -> None: """Normal config without connectors should work.""" - config = AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/test"}) + config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test"}) assert config is not None assert config.driver_features.get("enable_cloud_sql", False) is not True assert config.driver_features.get("enable_alloydb", False) is not True @@ -190,7 +192,7 @@ async def test_cloud_sql_connector_initialization(mock_cloud_sql_module) -> None mock_create_pool.return_value = mock_pool config = AsyncpgConfig( - pool_config={"user": "testuser", "password": "testpass", "database": "testdb"}, + connection_config={"user": "testuser", "password": "testpass", "database": "testdb"}, driver_features={"enable_cloud_sql": True, "cloud_sql_instance": "project:region:instance"}, ) @@ -225,7 +227,7 @@ async def mock_connect(**kwargs): mock_create_pool.return_value = mock_pool config = AsyncpgConfig( - pool_config={"user": "testuser", "database": "testdb"}, + connection_config={"user": "testuser", "database": "testdb"}, driver_features={ "enable_cloud_sql": True, "cloud_sql_instance": "project:region:instance", @@ -256,7 +258,7 @@ async def mock_connect(**kwargs): mock_create_pool.return_value = mock_pool config = AsyncpgConfig( - pool_config={"user": "testuser", "password": "testpass", "database": "testdb"}, + connection_config={"user": "testuser", "password": "testpass", "database": "testdb"}, driver_features={ "enable_cloud_sql": True, "cloud_sql_instance": "project:region:instance", @@ -287,7 +289,7 @@ async def mock_connect(**kwargs): mock_create_pool.return_value = mock_pool config = AsyncpgConfig( - pool_config={"user": "testuser", "password": "testpass", "database": "testdb"}, + connection_config={"user": "testuser", "password": "testpass", "database": "testdb"}, driver_features={ "enable_cloud_sql": True, "cloud_sql_instance": "project:region:instance", @@ -318,7 +320,7 @@ async def 
mock_connect(**kwargs): mock_create_pool.return_value = mock_pool config = AsyncpgConfig( - pool_config={"user": "testuser", "password": "testpass", "database": "testdb"}, + connection_config={"user": "testuser", "password": "testpass", "database": "testdb"}, driver_features={"enable_cloud_sql": True, "cloud_sql_instance": "project:region:instance"}, ) @@ -340,7 +342,7 @@ async def test_alloydb_connector_initialization(mock_alloydb_module) -> None: mock_create_pool.return_value = mock_pool config = AsyncpgConfig( - pool_config={"user": "testuser", "password": "testpass", "database": "testdb"}, + connection_config={"user": "testuser", "password": "testpass", "database": "testdb"}, driver_features={ "enable_alloydb": True, "alloydb_instance_uri": "projects/p/locations/r/clusters/c/instances/i", @@ -378,7 +380,7 @@ async def mock_connect(**kwargs): mock_create_pool.return_value = mock_pool config = AsyncpgConfig( - pool_config={"user": "testuser", "database": "testdb"}, + connection_config={"user": "testuser", "database": "testdb"}, driver_features={ "enable_alloydb": True, "alloydb_instance_uri": "projects/p/locations/r/clusters/c/instances/i", @@ -409,7 +411,7 @@ async def mock_connect(**kwargs): mock_create_pool.return_value = mock_pool config = AsyncpgConfig( - pool_config={"user": "testuser", "password": "testpass", "database": "testdb"}, + connection_config={"user": "testuser", "password": "testpass", "database": "testdb"}, driver_features={ "enable_alloydb": True, "alloydb_instance_uri": "projects/p/locations/r/clusters/c/instances/i", @@ -437,7 +439,7 @@ async def test_cloud_sql_connector_cleanup(mock_cloud_sql_module) -> None: mock_create_pool.return_value = mock_pool config = AsyncpgConfig( - pool_config={"user": "testuser", "password": "testpass", "database": "testdb"}, + connection_config={"user": "testuser", "password": "testpass", "database": "testdb"}, driver_features={"enable_cloud_sql": True, "cloud_sql_instance": "project:region:instance"}, ) @@ -463,7 +465,7 @@ async def test_alloydb_connector_cleanup(mock_alloydb_module) -> None: mock_create_pool.return_value = mock_pool config = AsyncpgConfig( - pool_config={"user": "testuser", "password": "testpass", "database": "testdb"}, + connection_config={"user": "testuser", "password": "testpass", "database": "testdb"}, driver_features={ "enable_alloydb": True, "alloydb_instance_uri": "projects/p/locations/r/clusters/c/instances/i", @@ -490,7 +492,7 @@ async def test_connection_factory_pattern_cloud_sql(mock_cloud_sql_module) -> No mock_create_pool.return_value = mock_pool config = AsyncpgConfig( - pool_config={"user": "testuser", "password": "testpass", "database": "testdb"}, + connection_config={"user": "testuser", "password": "testpass", "database": "testdb"}, driver_features={"enable_cloud_sql": True, "cloud_sql_instance": "project:region:instance"}, ) @@ -514,7 +516,7 @@ async def test_connection_factory_pattern_alloydb(mock_alloydb_module) -> None: mock_create_pool.return_value = mock_pool config = AsyncpgConfig( - pool_config={"user": "testuser", "password": "testpass", "database": "testdb"}, + connection_config={"user": "testuser", "password": "testpass", "database": "testdb"}, driver_features={ "enable_alloydb": True, "alloydb_instance_uri": "projects/p/locations/r/clusters/c/instances/i", @@ -531,11 +533,11 @@ async def test_connection_factory_pattern_alloydb(mock_alloydb_module) -> None: @pytest.mark.asyncio async def test_pool_close_without_connectors() -> None: """Closing pool without connectors should not raise 
errors.""" - config = AsyncpgConfig(pool_config={"dsn": "postgresql://localhost/test"}) + config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test"}) mock_pool = MagicMock() mock_pool.close = AsyncMock() - config.pool_instance = mock_pool + config.connection_instance = mock_pool await config._close_pool() diff --git a/tests/unit/test_adapters/test_duckdb/test_extension_flags.py b/tests/unit/test_adapters/test_duckdb/test_extension_flags.py index 5a2b7381c..2f7e03e28 100644 --- a/tests/unit/test_adapters/test_duckdb/test_extension_flags.py +++ b/tests/unit/test_adapters/test_duckdb/test_extension_flags.py @@ -8,10 +8,10 @@ def test_duckdb_config_promotes_security_flags() -> None: - """Extension flags should move from pool_config to driver_features.""" + """Extension flags should move from connection_config to driver_features.""" config = DuckDBConfig( - pool_config={ + connection_config={ "database": ":memory:", "allow_community_extensions": True, "allow_unsigned_extensions": False, @@ -25,16 +25,16 @@ def test_duckdb_config_promotes_security_flags() -> None: "allow_unsigned_extensions": False, "enable_external_access": True, } - assert "allow_community_extensions" not in config.pool_config - assert "allow_unsigned_extensions" not in config.pool_config - assert "enable_external_access" not in config.pool_config + assert "allow_community_extensions" not in config.connection_config + assert "allow_unsigned_extensions" not in config.connection_config + assert "enable_external_access" not in config.connection_config def test_duckdb_config_merges_existing_extension_flags() -> None: """Existing driver feature flags should merge with promoted ones.""" config = DuckDBConfig( - pool_config={"database": ":memory:", "allow_community_extensions": True}, + connection_config={"database": ":memory:", "allow_community_extensions": True}, driver_features={"extension_flags": {"custom": "value"}}, ) diff --git a/tests/unit/test_adapters/test_extension_config.py b/tests/unit/test_adapters/test_extension_config.py index 36bca448c..bb71dcadd 100644 --- a/tests/unit/test_adapters/test_extension_config.py +++ b/tests/unit/test_adapters/test_extension_config.py @@ -23,7 +23,7 @@ def test_sqlite_extension_config() -> None: "ExtensionConfigs", {"litestar": {"session_key": "custom_session", "commit_mode": "manual"}} ) - config = SqliteConfig(pool_config={"database": ":memory:"}, extension_config=extension_config) + config = SqliteConfig(connection_config={"database": ":memory:"}, extension_config=extension_config) assert config.extension_config == extension_config litestar_settings = cast("dict[str, Any]", config.extension_config["litestar"]) @@ -36,7 +36,7 @@ def test_aiosqlite_extension_config() -> None: ExtensionConfigs, {"litestar": {"pool_key": "db_pool", "enable_correlation_middleware": False}} ) - config = AiosqliteConfig(pool_config={"database": ":memory:"}, extension_config=extension_config) + config = AiosqliteConfig(connection_config={"database": ":memory:"}, extension_config=extension_config) assert config.extension_config == extension_config litestar_settings = cast("dict[str, Any]", config.extension_config["litestar"]) @@ -47,7 +47,7 @@ def test_duckdb_extension_config() -> None: """Test DuckDBConfig accepts and stores extension_config.""" extension_config = cast("ExtensionConfigs", {"litestar": {"connection_key": "duckdb_conn"}}) - config = DuckDBConfig(pool_config={"database": ":memory:"}, extension_config=extension_config) + config = DuckDBConfig(connection_config={"database": 
":memory:"}, extension_config=extension_config) assert config.extension_config == extension_config @@ -56,7 +56,9 @@ def test_asyncpg_extension_config() -> None: """Test AsyncpgConfig accepts and stores extension_config.""" extension_config = cast("ExtensionConfigs", {"litestar": {"commit_mode": "autocommit"}}) - config = AsyncpgConfig(pool_config={"host": "localhost", "database": "test"}, extension_config=extension_config) + config = AsyncpgConfig( + connection_config={"host": "localhost", "database": "test"}, extension_config=extension_config + ) assert config.extension_config == extension_config @@ -65,7 +67,9 @@ def test_psycopg_sync_extension_config() -> None: """Test PsycopgSyncConfig accepts and stores extension_config.""" extension_config = cast("ExtensionConfigs", {"litestar": {"session_key": "psycopg_session"}}) - config = PsycopgSyncConfig(pool_config={"host": "localhost", "dbname": "test"}, extension_config=extension_config) + config = PsycopgSyncConfig( + connection_config={"host": "localhost", "dbname": "test"}, extension_config=extension_config + ) assert config.extension_config == extension_config @@ -74,7 +78,9 @@ def test_psycopg_async_extension_config() -> None: """Test PsycopgAsyncConfig accepts and stores extension_config.""" extension_config = cast("ExtensionConfigs", {"litestar": {"extra_commit_statuses": {201, 202}}}) - config = PsycopgAsyncConfig(pool_config={"host": "localhost", "dbname": "test"}, extension_config=extension_config) + config = PsycopgAsyncConfig( + connection_config={"host": "localhost", "dbname": "test"}, extension_config=extension_config + ) assert config.extension_config == extension_config @@ -83,7 +89,9 @@ def test_asyncmy_extension_config() -> None: """Test AsyncmyConfig accepts and stores extension_config.""" extension_config = cast("ExtensionConfigs", {"litestar": {"commit_mode": "autocommit_include_redirect"}}) - config = AsyncmyConfig(pool_config={"host": "localhost", "database": "test"}, extension_config=extension_config) + config = AsyncmyConfig( + connection_config={"host": "localhost", "database": "test"}, extension_config=extension_config + ) assert config.extension_config == extension_config @@ -92,7 +100,7 @@ def test_psqlpy_extension_config() -> None: """Test PsqlpyConfig accepts and stores extension_config.""" extension_config = cast("ExtensionConfigs", {"litestar": {"extra_rollback_statuses": {400, 500}}}) - config = PsqlpyConfig(pool_config={"host": "localhost", "db_name": "test"}, extension_config=extension_config) + config = PsqlpyConfig(connection_config={"host": "localhost", "db_name": "test"}, extension_config=extension_config) assert config.extension_config == extension_config @@ -101,7 +109,7 @@ def test_oracle_sync_extension_config() -> None: """Test OracleSyncConfig accepts and stores extension_config.""" extension_config = cast("ExtensionConfigs", {"litestar": {"enable_correlation_middleware": True}}) - config = OracleSyncConfig(pool_config={"user": "test", "password": "test"}, extension_config=extension_config) + config = OracleSyncConfig(connection_config={"user": "test", "password": "test"}, extension_config=extension_config) assert config.extension_config == extension_config @@ -110,7 +118,9 @@ def test_oracle_async_extension_config() -> None: """Test OracleAsyncConfig accepts and stores extension_config.""" extension_config = cast("ExtensionConfigs", {"litestar": {"connection_key": "oracle_async"}}) - config = OracleAsyncConfig(pool_config={"user": "test", "password": "test"}, extension_config=extension_config) + 
config = OracleAsyncConfig( + connection_config={"user": "test", "password": "test"}, extension_config=extension_config + ) assert config.extension_config == extension_config @@ -138,16 +148,16 @@ def test_bigquery_extension_config() -> None: def test_extension_config_defaults_to_empty_dict() -> None: """Test that extension_config defaults to empty dict when not provided.""" configs = [ - SqliteConfig(pool_config={"database": ":memory:"}), - DuckDBConfig(pool_config={"database": ":memory:"}), - AiosqliteConfig(pool_config={"database": ":memory:"}), - AsyncpgConfig(pool_config={"host": "localhost"}), - PsycopgSyncConfig(pool_config={"host": "localhost"}), - PsycopgAsyncConfig(pool_config={"host": "localhost"}), - AsyncmyConfig(pool_config={"host": "localhost"}), - PsqlpyConfig(pool_config={"host": "localhost"}), - OracleSyncConfig(pool_config={"user": "test", "password": "test"}), - OracleAsyncConfig(pool_config={"user": "test", "password": "test"}), + SqliteConfig(connection_config={"database": ":memory:"}), + DuckDBConfig(connection_config={"database": ":memory:"}), + AiosqliteConfig(connection_config={"database": ":memory:"}), + AsyncpgConfig(connection_config={"host": "localhost"}), + PsycopgSyncConfig(connection_config={"host": "localhost"}), + PsycopgAsyncConfig(connection_config={"host": "localhost"}), + AsyncmyConfig(connection_config={"host": "localhost"}), + PsqlpyConfig(connection_config={"host": "localhost"}), + OracleSyncConfig(connection_config={"user": "test", "password": "test"}), + OracleAsyncConfig(connection_config={"user": "test", "password": "test"}), AdbcConfig(connection_config={"driver_name": "sqlite", "uri": "sqlite://:memory:"}), BigQueryConfig(connection_config={"project": "test"}), ] @@ -168,7 +178,7 @@ def test_extension_config_with_multiple_extensions() -> None: }, ) - config = SqliteConfig(pool_config={"database": ":memory:"}, extension_config=extension_config) + config = SqliteConfig(connection_config={"database": ":memory:"}, extension_config=extension_config) assert config.extension_config == extension_config assert len(config.extension_config) == 3 @@ -180,16 +190,16 @@ def test_extension_config_with_multiple_extensions() -> None: @pytest.mark.parametrize( "config_class,init_kwargs", [ - (SqliteConfig, {"pool_config": {"database": ":memory:"}}), - (AiosqliteConfig, {"pool_config": {"database": ":memory:"}}), - (DuckDBConfig, {"pool_config": {"database": ":memory:"}}), - (AsyncpgConfig, {"pool_config": {"host": "localhost"}}), - (PsycopgSyncConfig, {"pool_config": {"host": "localhost"}}), - (PsycopgAsyncConfig, {"pool_config": {"host": "localhost"}}), - (AsyncmyConfig, {"pool_config": {"host": "localhost"}}), - (PsqlpyConfig, {"pool_config": {"host": "localhost"}}), - (OracleSyncConfig, {"pool_config": {"user": "test", "password": "test"}}), - (OracleAsyncConfig, {"pool_config": {"user": "test", "password": "test"}}), + (SqliteConfig, {"connection_config": {"database": ":memory:"}}), + (AiosqliteConfig, {"connection_config": {"database": ":memory:"}}), + (DuckDBConfig, {"connection_config": {"database": ":memory:"}}), + (AsyncpgConfig, {"connection_config": {"host": "localhost"}}), + (PsycopgSyncConfig, {"connection_config": {"host": "localhost"}}), + (PsycopgAsyncConfig, {"connection_config": {"host": "localhost"}}), + (AsyncmyConfig, {"connection_config": {"host": "localhost"}}), + (PsqlpyConfig, {"connection_config": {"host": "localhost"}}), + (OracleSyncConfig, {"connection_config": {"user": "test", "password": "test"}}), + (OracleAsyncConfig, 
{"connection_config": {"user": "test", "password": "test"}}), (AdbcConfig, {"connection_config": {"driver_name": "sqlite", "uri": "sqlite://:memory:"}}), (BigQueryConfig, {"connection_config": {"project": "test"}}), ], diff --git a/tests/unit/test_adapters/test_spanner/test_config.py b/tests/unit/test_adapters/test_spanner/test_config.py index be5a77ab3..9ec810125 100644 --- a/tests/unit/test_adapters/test_spanner/test_config.py +++ b/tests/unit/test_adapters/test_spanner/test_config.py @@ -22,20 +22,20 @@ def with_cursor(self, connection): def test_config_initialization() -> None: """Test basic configuration initialization.""" config = SpannerSyncConfig( - pool_config={"project": "my-project", "instance_id": "my-instance", "database_id": "my-database"} + connection_config={"project": "my-project", "instance_id": "my-instance", "database_id": "my-database"} ) - assert config.pool_config is not None - assert config.pool_config["project"] == "my-project" - assert config.pool_config["instance_id"] == "my-instance" - assert config.pool_config["database_id"] == "my-database" + assert config.connection_config is not None + assert config.connection_config["project"] == "my-project" + assert config.connection_config["instance_id"] == "my-instance" + assert config.connection_config["database_id"] == "my-database" def test_config_defaults() -> None: """Test default values.""" - config = SpannerSyncConfig(pool_config={"project": "p", "instance_id": "i", "database_id": "d"}) - assert config.pool_config is not None - assert config.pool_config["min_sessions"] == 1 - assert config.pool_config["max_sessions"] == 10 + config = SpannerSyncConfig(connection_config={"project": "p", "instance_id": "i", "database_id": "d"}) + assert config.connection_config is not None + assert config.connection_config["min_sessions"] == 1 + assert config.connection_config["max_sessions"] == 10 def test_improper_configuration() -> None: @@ -47,7 +47,7 @@ def test_improper_configuration() -> None: def test_driver_features_defaults() -> None: """Test driver features defaults.""" - config = SpannerSyncConfig(pool_config={"project": "p", "instance_id": "i", "database_id": "d"}) + config = SpannerSyncConfig(connection_config={"project": "p", "instance_id": "i", "database_id": "d"}) assert config.driver_features["enable_uuid_conversion"] is True assert config.driver_features["json_serializer"] is not None @@ -98,7 +98,7 @@ def session(self): def snapshot(self, multi_use: bool = False): return _Ctx(snap_obj) - config = SpannerSyncConfig(pool_config={"project": "p", "instance_id": "i", "database_id": "d"}) + config = SpannerSyncConfig(connection_config={"project": "p", "instance_id": "i", "database_id": "d"}) config.get_database = lambda: _DB() # type: ignore[assignment] with config.provide_connection(transaction=True) as conn: @@ -150,7 +150,7 @@ def session(self): def snapshot(self, multi_use: bool = False): return _Ctx() - config = SpannerSyncConfig(pool_config={"project": "p", "instance_id": "i", "database_id": "d"}) + config = SpannerSyncConfig(connection_config={"project": "p", "instance_id": "i", "database_id": "d"}) config.get_database = lambda: _DB() # type: ignore[assignment] config.driver_type = _DummyDriver # type: ignore[assignment,misc] @@ -201,7 +201,7 @@ def session(self): def snapshot(self, multi_use: bool = False): return _Ctx() - config = SpannerSyncConfig(pool_config={"project": "p", "instance_id": "i", "database_id": "d"}) + config = SpannerSyncConfig(connection_config={"project": "p", "instance_id": "i", 
"database_id": "d"}) config.get_database = lambda: _DB() # type: ignore[assignment] config.driver_type = _DummyDriver # type: ignore[assignment,misc] diff --git a/tests/unit/test_base/test_config_instances.py b/tests/unit/test_base/test_config_instances.py index 9c72d7f17..8cbfaebf8 100644 --- a/tests/unit/test_base/test_config_instances.py +++ b/tests/unit/test_base/test_config_instances.py @@ -22,8 +22,8 @@ def test_multiple_same_type_configs() -> None: """Test that multiple configs of same adapter type are stored separately.""" manager = SQLSpec() - config1 = DuckDBConfig(pool_config={"database": ":memory:"}) - config2 = DuckDBConfig(pool_config={"database": ":memory:"}) + config1 = DuckDBConfig(connection_config={"database": ":memory:"}) + config2 = DuckDBConfig(connection_config={"database": ":memory:"}) handle1 = manager.add_config(config1) handle2 = manager.add_config(config2) @@ -37,7 +37,7 @@ def test_multiple_same_type_configs() -> None: def test_add_config_returns_same_instance() -> None: """Test that add_config returns the same config instance.""" manager = SQLSpec() - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) result = manager.add_config(config) @@ -47,7 +47,7 @@ def test_add_config_returns_same_instance() -> None: def test_provide_session_auto_registers_config() -> None: """Test that provide_session auto-registers configs.""" manager = SQLSpec() - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) # Config should be auto-registered when used with manager.provide_session(config) as session: @@ -60,7 +60,7 @@ def test_provide_session_auto_registers_config() -> None: def test_provide_connection_auto_registers_config() -> None: """Test that provide_connection auto-registers configs.""" manager = SQLSpec() - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) # Config should be auto-registered when used with manager.provide_connection(config) as conn: @@ -73,7 +73,7 @@ def test_provide_connection_auto_registers_config() -> None: def test_get_connection_auto_registers_config() -> None: """Test that get_connection auto-registers configs.""" manager = SQLSpec() - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) # Config should be auto-registered when used conn = manager.get_connection(config) @@ -87,7 +87,7 @@ def test_get_connection_auto_registers_config() -> None: def test_get_session_auto_registers_config() -> None: """Test that get_session auto-registers configs.""" manager = SQLSpec() - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) # Config should be auto-registered when used session = manager.get_session(config) @@ -100,7 +100,7 @@ def test_get_session_auto_registers_config() -> None: def test_get_pool_auto_registers_config() -> None: """Test that get_pool auto-registers configs.""" manager = SQLSpec() - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) # Config should be auto-registered when used pool = manager.get_pool(config) @@ -113,7 +113,7 @@ def test_get_pool_auto_registers_config() -> None: def test_close_pool_auto_registers_config() -> None: """Test that close_pool auto-registers configs.""" manager = 
SQLSpec() - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) # Config should be auto-registered when used # Note: close_pool doesn't fail on unregistered configs, it just does nothing @@ -126,7 +126,7 @@ def test_close_pool_auto_registers_config() -> None: def test_registry_uses_id_as_key() -> None: """Test that registry uses id(config) as key.""" manager = SQLSpec() - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) manager.add_config(config) @@ -136,8 +136,8 @@ def test_registry_uses_id_as_key() -> None: def test_mixed_adapter_types_stored_separately() -> None: """Test that different adapter types are stored separately.""" manager = SQLSpec() - duckdb_config = DuckDBConfig(pool_config={"database": ":memory:"}) - sqlite_config = SqliteConfig(pool_config={"database": ":memory:"}) + duckdb_config = DuckDBConfig(connection_config={"database": ":memory:"}) + sqlite_config = SqliteConfig(connection_config={"database": ":memory:"}) manager.add_config(duckdb_config) manager.add_config(sqlite_config) @@ -150,7 +150,7 @@ def test_mixed_adapter_types_stored_separately() -> None: def test_config_overwrite_warning_on_duplicate_id() -> None: """Test that adding same config instance twice overwrites.""" manager = SQLSpec() - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) manager.add_config(config) manager.add_config(config) @@ -162,7 +162,7 @@ def test_config_overwrite_warning_on_duplicate_id() -> None: def test_registered_config_works_with_provide_session() -> None: """Test that registered configs work with provide_session.""" manager = SQLSpec() - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) manager.add_config(config) @@ -173,7 +173,7 @@ def test_registered_config_works_with_provide_session() -> None: def test_registered_config_works_with_provide_connection() -> None: """Test that registered configs work with provide_connection.""" manager = SQLSpec() - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) manager.add_config(config) @@ -184,7 +184,7 @@ def test_registered_config_works_with_provide_connection() -> None: def test_registered_config_works_with_get_connection() -> None: """Test that registered configs work with get_connection.""" manager = SQLSpec() - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) manager.add_config(config) @@ -195,7 +195,7 @@ def test_registered_config_works_with_get_connection() -> None: def test_registered_config_works_with_get_session() -> None: """Test that registered configs work with get_session.""" manager = SQLSpec() - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) manager.add_config(config) @@ -206,7 +206,7 @@ def test_registered_config_works_with_get_session() -> None: def test_registered_config_works_with_get_pool() -> None: """Test that registered configs work with get_pool.""" manager = SQLSpec() - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) manager.add_config(config) @@ -217,7 +217,7 @@ def 
test_registered_config_works_with_get_pool() -> None: def test_registered_config_works_with_close_pool() -> None: """Test that registered configs work with close_pool.""" manager = SQLSpec() - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) manager.add_config(config) @@ -228,8 +228,8 @@ def test_registered_config_works_with_close_pool() -> None: def test_multiple_configs_same_type_provide_session_independently() -> None: """Test that multiple configs of same type work independently with provide_session.""" manager = SQLSpec() - config1 = DuckDBConfig(pool_config={"database": ":memory:"}) - config2 = DuckDBConfig(pool_config={"database": ":memory:"}) + config1 = DuckDBConfig(connection_config={"database": ":memory:"}) + config2 = DuckDBConfig(connection_config={"database": ":memory:"}) manager.add_config(config1) manager.add_config(config2) @@ -244,7 +244,7 @@ def test_multiple_configs_same_type_provide_session_independently() -> None: def test_instance_identity_preserved_through_add_config() -> None: """Test that config instance identity is preserved through add_config.""" manager = SQLSpec() - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) original_id = id(config) returned_config = manager.add_config(config) @@ -256,8 +256,8 @@ def test_instance_identity_preserved_through_add_config() -> None: def test_configs_property_returns_dict_with_id_keys() -> None: """Test that configs property returns dict mapping id to config.""" manager = SQLSpec() - config1 = DuckDBConfig(pool_config={"database": ":memory:"}) - config2 = SqliteConfig(pool_config={"database": ":memory:"}) + config1 = DuckDBConfig(connection_config={"database": ":memory:"}) + config2 = SqliteConfig(connection_config={"database": ":memory:"}) manager.add_config(config1) manager.add_config(config2) @@ -274,8 +274,8 @@ def test_multiple_managers_have_independent_registries() -> None: manager1 = SQLSpec() manager2 = SQLSpec() - config1 = DuckDBConfig(pool_config={"database": ":memory:"}) - config2 = DuckDBConfig(pool_config={"database": ":memory:"}) + config1 = DuckDBConfig(connection_config={"database": ":memory:"}) + config2 = DuckDBConfig(connection_config={"database": ":memory:"}) manager1.add_config(config1) manager2.add_config(config2) @@ -292,7 +292,7 @@ def test_config_can_be_registered_with_multiple_managers() -> None: """Test that same config instance can be registered with multiple managers.""" manager1 = SQLSpec() manager2 = SQLSpec() - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) manager1.add_config(config) manager2.add_config(config) @@ -304,7 +304,7 @@ def test_config_can_be_registered_with_multiple_managers() -> None: def test_unregistered_after_manager_recreation() -> None: """Test that configs are unregistered when manager is recreated.""" - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) manager1 = SQLSpec() manager1.add_config(config) @@ -317,12 +317,12 @@ def test_unregistered_after_manager_recreation() -> None: def test_registry_survives_config_modifications() -> None: """Test that registry lookup works after config attributes are modified.""" manager = SQLSpec() - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) 
manager.add_config(config) original_id = id(config) - config.pool_config["database"] = "test.db" + config.connection_config["database"] = "test.db" assert id(config) == original_id assert id(config) in manager.configs @@ -331,7 +331,7 @@ def test_registry_survives_config_modifications() -> None: def test_auto_registration_on_first_use() -> None: """Test that configs are automatically registered on first use.""" manager = SQLSpec() - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) # Config should not be registered initially assert id(config) not in manager.configs @@ -347,9 +347,9 @@ def test_auto_registration_on_first_use() -> None: def test_three_configs_same_type_all_stored() -> None: """Test that more than two configs of same type are all stored.""" manager = SQLSpec() - config1 = DuckDBConfig(pool_config={"database": ":memory:"}) - config2 = DuckDBConfig(pool_config={"database": ":memory:"}) - config3 = DuckDBConfig(pool_config={"database": ":memory:"}) + config1 = DuckDBConfig(connection_config={"database": ":memory:"}) + config2 = DuckDBConfig(connection_config={"database": ":memory:"}) + config3 = DuckDBConfig(connection_config={"database": ":memory:"}) manager.add_config(config1) manager.add_config(config2) @@ -371,7 +371,7 @@ def test_registry_clear_on_fresh_instance() -> None: def test_config_instance_is_handle_pattern() -> None: """Test the 'config instance IS the handle' pattern.""" manager = SQLSpec() - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) handle = manager.add_config(config) @@ -385,8 +385,8 @@ def test_config_instance_is_handle_pattern() -> None: def test_multiple_sqlite_configs_stored_separately() -> None: """Test that multiple SQLite configs are stored separately.""" manager = SQLSpec() - config1 = SqliteConfig(pool_config={"database": ":memory:"}) - config2 = SqliteConfig(pool_config={"database": ":memory:"}) + config1 = SqliteConfig(connection_config={"database": ":memory:"}) + config2 = SqliteConfig(connection_config={"database": ":memory:"}) manager.add_config(config1) manager.add_config(config2) @@ -399,10 +399,10 @@ def test_multiple_sqlite_configs_stored_separately() -> None: def test_mixed_sqlite_duckdb_configs() -> None: """Test that mixed SQLite and DuckDB configs coexist.""" manager = SQLSpec() - sqlite1 = SqliteConfig(pool_config={"database": ":memory:"}) - duckdb1 = DuckDBConfig(pool_config={"database": ":memory:"}) - sqlite2 = SqliteConfig(pool_config={"database": ":memory:"}) - duckdb2 = DuckDBConfig(pool_config={"database": ":memory:"}) + sqlite1 = SqliteConfig(connection_config={"database": ":memory:"}) + duckdb1 = DuckDBConfig(connection_config={"database": ":memory:"}) + sqlite2 = SqliteConfig(connection_config={"database": ":memory:"}) + duckdb2 = DuckDBConfig(connection_config={"database": ":memory:"}) manager.add_config(sqlite1) manager.add_config(duckdb1) @@ -419,7 +419,7 @@ def test_mixed_sqlite_duckdb_configs() -> None: def test_config_not_in_registry_after_no_add() -> None: """Test that simply creating a config doesn't add it to registry.""" manager = SQLSpec() - config = DuckDBConfig(pool_config={"database": ":memory:"}) + config = DuckDBConfig(connection_config={"database": ":memory:"}) assert id(config) not in manager.configs @@ -427,9 +427,9 @@ def test_config_not_in_registry_after_no_add() -> None: def test_provide_session_with_correct_config_after_multiple_adds() -> None: 
"""Test provide_session works with correct config after adding multiple.""" manager = SQLSpec() - config1 = DuckDBConfig(pool_config={"database": ":memory:"}) - config2 = DuckDBConfig(pool_config={"database": ":memory:"}) - config3 = DuckDBConfig(pool_config={"database": ":memory:"}) + config1 = DuckDBConfig(connection_config={"database": ":memory:"}) + config2 = DuckDBConfig(connection_config={"database": ":memory:"}) + config3 = DuckDBConfig(connection_config={"database": ":memory:"}) manager.add_config(config1) manager.add_config(config2) @@ -443,7 +443,7 @@ def test_all_methods_auto_register_configs() -> None: """Test that all methods auto-register configs on first use.""" # Test provide_session manager1 = SQLSpec() - config1 = DuckDBConfig(pool_config={"database": ":memory:"}) + config1 = DuckDBConfig(connection_config={"database": ":memory:"}) assert id(config1) not in manager1.configs with manager1.provide_session(config1): pass @@ -451,7 +451,7 @@ def test_all_methods_auto_register_configs() -> None: # Test provide_connection manager2 = SQLSpec() - config2 = DuckDBConfig(pool_config={"database": ":memory:"}) + config2 = DuckDBConfig(connection_config={"database": ":memory:"}) assert id(config2) not in manager2.configs with manager2.provide_connection(config2): pass @@ -459,7 +459,7 @@ def test_all_methods_auto_register_configs() -> None: # Test get_connection manager3 = SQLSpec() - config3 = DuckDBConfig(pool_config={"database": ":memory:"}) + config3 = DuckDBConfig(connection_config={"database": ":memory:"}) assert id(config3) not in manager3.configs conn = manager3.get_connection(config3) conn.close() @@ -467,21 +467,21 @@ def test_all_methods_auto_register_configs() -> None: # Test get_session manager4 = SQLSpec() - config4 = DuckDBConfig(pool_config={"database": ":memory:"}) + config4 = DuckDBConfig(connection_config={"database": ":memory:"}) assert id(config4) not in manager4.configs manager4.get_session(config4) assert id(config4) in manager4.configs # Test get_pool manager5 = SQLSpec() - config5 = DuckDBConfig(pool_config={"database": ":memory:"}) + config5 = DuckDBConfig(connection_config={"database": ":memory:"}) assert id(config5) not in manager5.configs manager5.get_pool(config5) assert id(config5) in manager5.configs # Test close_pool manager6 = SQLSpec() - config6 = DuckDBConfig(pool_config={"database": ":memory:"}) + config6 = DuckDBConfig(connection_config={"database": ":memory:"}) assert id(config6) not in manager6.configs manager6.close_pool(config6) assert id(config6) in manager6.configs diff --git a/tests/unit/test_base/test_sql_integration.py b/tests/unit/test_base/test_sql_integration.py index 3055300e5..b2506eb94 100644 --- a/tests/unit/test_base/test_sql_integration.py +++ b/tests/unit/test_base/test_sql_integration.py @@ -205,7 +205,7 @@ def test_sql_integration_with_existing_functionality() -> None: sql_spec = SQLSpec() - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) returned_config = sql_spec.add_config(config) sql_spec.add_named_sql("get_users", "SELECT * FROM users") @@ -267,7 +267,7 @@ def test_backwards_compatibility() -> None: from sqlspec.adapters.sqlite import SqliteConfig sql_spec = SQLSpec() - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) sql_spec.add_config(config) with sql_spec.provide_session(config) as session: diff --git a/tests/unit/test_cli/test_config_loading.py 
b/tests/unit/test_cli/test_config_loading.py index d03e341ed..a6ab12b85 100644 --- a/tests/unit/test_cli/test_config_loading.py +++ b/tests/unit/test_cli/test_config_loading.py @@ -44,7 +44,7 @@ def test_direct_config_instance_loading( config = SqliteConfig( bind_key="test", - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={"enabled": True, "script_location": "migrations"} ) database_config = config @@ -73,7 +73,7 @@ def test_sync_callable_config_loading( def get_database_config(): config = SqliteConfig( bind_key="sync_test", - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={"enabled": True} ) return config @@ -105,7 +105,7 @@ async def get_database_config(): await asyncio.sleep(0.001) config = SqliteConfig( bind_key="async_test", - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={"enabled": True} ) return config @@ -136,7 +136,7 @@ def test_show_config_with_path_object( config = SqliteConfig( bind_key="path_test", - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={"enabled": True, "script_location": Path("custom_migrations")} ) database_config = config diff --git a/tests/unit/test_cli/test_migration_commands.py b/tests/unit/test_cli/test_migration_commands.py index c7987bfa2..65a2711fb 100644 --- a/tests/unit/test_cli/test_migration_commands.py +++ b/tests/unit/test_cli/test_migration_commands.py @@ -48,7 +48,7 @@ def test_show_config_command(tmp_path: Path, monkeypatch: pytest.MonkeyPatch, cl def get_config(): config = SqliteConfig( bind_key="migration_test", - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={ "enabled": True, "script_location": "migrations" @@ -79,13 +79,13 @@ def test_show_config_with_multiple_configs( def get_configs(): sqlite_config = SqliteConfig( bind_key="sqlite_migrations", - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={"enabled": True, "script_location": "sqlite_migrations"} ) duckdb_config = DuckDBConfig( bind_key="duckdb_migrations", - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={"enabled": True, "script_location": "duckdb_migrations"} ) @@ -113,7 +113,7 @@ def get_config(): # Config without migration_config config = SqliteConfig( bind_key="no_migrations", - pool_config={"database": ":memory:"} + connection_config={"database": ":memory:"} ) return config """ @@ -146,7 +146,7 @@ def test_show_current_revision_command( def get_config(): config = SqliteConfig( bind_key="revision_test", - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={"enabled": True, "script_location": "migrations"} ) return config @@ -177,7 +177,7 @@ def test_show_current_revision_verbose( def get_config(): config = SqliteConfig( bind_key="verbose_test", - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={"enabled": True} ) return config @@ -208,7 +208,7 @@ def test_init_command( from sqlspec.adapters.sqlite.config import SqliteConfig def get_config(): - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) config.bind_key = "init_test" config.migration_config = {"script_location": "test_migrations"} return config @@ -237,7 +237,7 @@ def 
test_init_command_custom_directory(
 from sqlspec.adapters.sqlite.config import SqliteConfig

 def get_config():
-    config = SqliteConfig(pool_config={"database": ":memory:"})
+    config = SqliteConfig(connection_config={"database": ":memory:"})
     config.bind_key = "custom_init"
     config.migration_config = {"script_location": "migrations"}
     return config
@@ -268,7 +268,7 @@ def test_create_migration_command(
 from sqlspec.adapters.sqlite.config import SqliteConfig

 def get_config():
-    config = SqliteConfig(pool_config={"database": ":memory:"})
+    config = SqliteConfig(connection_config={"database": ":memory:"})
     config.bind_key = "revision_test"
     config.migration_config = {"enabled": True}
     return config
@@ -300,7 +300,7 @@ def test_make_migration_alias(
 from sqlspec.adapters.sqlite.config import SqliteConfig

 def get_config():
-    config = SqliteConfig(pool_config={"database": ":memory:"})
+    config = SqliteConfig(connection_config={"database": ":memory:"})
     config.bind_key = "revision_test"
     config.migration_config = {"enabled": True}
     return config
@@ -331,7 +331,7 @@ def test_create_migration_command_with_format(
 from sqlspec.adapters.sqlite.config import SqliteConfig

 def get_config():
-    config = SqliteConfig(pool_config={"database": ":memory:"})
+    config = SqliteConfig(connection_config={"database": ":memory:"})
     config.bind_key = "revision_test"
     config.migration_config = {"enabled": True}
     return config
@@ -371,7 +371,7 @@ def test_create_migration_command_with_file_type_alias(
 from sqlspec.adapters.sqlite.config import SqliteConfig

 def get_config():
-    config = SqliteConfig(pool_config={"database": ":memory:"})
+    config = SqliteConfig(connection_config={"database": ":memory:"})
     config.bind_key = "revision_test"
     config.migration_config = {"enabled": True}
     return config
@@ -412,7 +412,7 @@ def test_upgrade_command(
 from sqlspec.adapters.sqlite.config import SqliteConfig

 def get_config():
-    config = SqliteConfig(pool_config={"database": ":memory:"})
+    config = SqliteConfig(connection_config={"database": ":memory:"})
     config.bind_key = "upgrade_test"
     config.migration_config = {"enabled": True}
     return config
@@ -443,7 +443,7 @@ def test_upgrade_command_specific_revision(
 from sqlspec.adapters.sqlite.config import SqliteConfig

 def get_config():
-    config = SqliteConfig(pool_config={"database": ":memory:"})
+    config = SqliteConfig(connection_config={"database": ":memory:"})
     config.bind_key = "upgrade_revision_test"
     config.migration_config = {"enabled": True}
     return config
@@ -474,7 +474,7 @@ def test_downgrade_command(
 from sqlspec.adapters.sqlite.config import SqliteConfig

 def get_config():
-    config = SqliteConfig(pool_config={"database": ":memory:"})
+    config = SqliteConfig(connection_config={"database": ":memory:"})
     config.bind_key = "downgrade_test"
     config.migration_config = {"enabled": True}
     return config
@@ -505,7 +505,7 @@ def test_stamp_command(
 from sqlspec.adapters.sqlite.config import SqliteConfig

 def get_config():
-    config = SqliteConfig(pool_config={"database": ":memory:"})
+    config = SqliteConfig(connection_config={"database": ":memory:"})
     config.bind_key = "stamp_test"
     config.migration_config = {"enabled": True}
     return config
@@ -535,11 +535,11 @@ def test_multi_config_operations(
 from sqlspec.adapters.duckdb.config import DuckDBConfig

 def get_configs():
-    sqlite_config = SqliteConfig(pool_config={"database": ":memory:"})
+    sqlite_config = SqliteConfig(connection_config={"database": ":memory:"})
     sqlite_config.bind_key = "sqlite_multi"
     sqlite_config.migration_config = {"enabled": True}

-    duckdb_config = DuckDBConfig(pool_config={"database": ":memory:"})
+    duckdb_config = DuckDBConfig(connection_config={"database": ":memory:"})
     duckdb_config.bind_key = "duckdb_multi"
     duckdb_config.migration_config = {"enabled": True}
@@ -573,11 +573,11 @@ def test_dry_run_operations(
 from sqlspec.adapters.sqlite.config import SqliteConfig

 def get_configs():
-    config1 = SqliteConfig(pool_config={"database": ":memory:"})
+    config1 = SqliteConfig(connection_config={"database": ":memory:"})
     config1.bind_key = "dry_run_test1"
     config1.migration_config = {"enabled": True}

-    config2 = SqliteConfig(pool_config={"database": "test.db"})
+    config2 = SqliteConfig(connection_config={"database": "test.db"})
     config2.bind_key = "dry_run_test2"
     config2.migration_config = {"enabled": True}
@@ -603,7 +603,7 @@ def test_execution_mode_reporting(tmp_path: Path, monkeypatch: pytest.MonkeyPatc
 from sqlspec.adapters.sqlite.config import SqliteConfig

 def get_config():
-    config = SqliteConfig(pool_config={"database": ":memory:"})
+    config = SqliteConfig(connection_config={"database": ":memory:"})
     config.bind_key = "execution_mode_test"
     config.migration_config = {"enabled": True}
     return config
@@ -637,7 +637,7 @@ def test_bind_key_filtering_single_config(
 def get_config():
     return SqliteConfig(
         bind_key="target_config",
-        pool_config={"database": ":memory:"},
+        connection_config={"database": ":memory:"},
         migration_config={"enabled": True, "script_location": "migrations"}
     )
 """
@@ -666,19 +666,19 @@ def test_bind_key_filtering_multiple_configs(
 def get_configs():
     sqlite_config = SqliteConfig(
         bind_key="sqlite_db",
-        pool_config={"database": ":memory:"},
+        connection_config={"database": ":memory:"},
         migration_config={"enabled": True, "script_location": "sqlite_migrations"}
     )

     duckdb_config = DuckDBConfig(
         bind_key="duckdb_db",
-        pool_config={"database": ":memory:"},
+        connection_config={"database": ":memory:"},
         migration_config={"enabled": True, "script_location": "duckdb_migrations"}
     )

     postgres_config = SqliteConfig(
         bind_key="postgres_db",
-        pool_config={"database": ":memory:"},
+        connection_config={"database": ":memory:"},
         migration_config={"enabled": True, "script_location": "postgres_migrations"}
     )
@@ -713,7 +713,7 @@ def get_configs():
     return [
         SqliteConfig(
             bind_key="existing_config",
-            pool_config={"database": ":memory:"},
+            connection_config={"database": ":memory:"},
             migration_config={"enabled": True}
         )
     ]
@@ -748,12 +748,12 @@ def get_multi_configs():
     return [
         SqliteConfig(
             bind_key="primary_db",
-            pool_config={"database": "primary.db"},
+            connection_config={"database": "primary.db"},
             migration_config={"enabled": True, "script_location": "primary_migrations"}
         ),
         DuckDBConfig(
             bind_key="analytics_db",
-            pool_config={"database": "analytics.duckdb"},
+            connection_config={"database": "analytics.duckdb"},
             migration_config={"enabled": True, "script_location": "analytics_migrations"}
         )
     ]
diff --git a/tests/unit/test_config/test_connection_config_edge_cases.py b/tests/unit/test_config/test_connection_config_edge_cases.py
new file mode 100644
index 000000000..de2428003
--- /dev/null
+++ b/tests/unit/test_config/test_connection_config_edge_cases.py
@@ -0,0 +1,426 @@
+"""Edge case tests for connection_config and connection_instance parameters.
+
+Tests unusual scenarios, boundary conditions, and error cases for the
+standardized parameter naming.
+"""
+
+import pytest
+
+from sqlspec.adapters.aiosqlite.config import AiosqliteConfig
+from sqlspec.adapters.asyncpg.config import AsyncpgConfig
+from sqlspec.adapters.duckdb.config import DuckDBConfig
+from sqlspec.adapters.sqlite.config import SqliteConfig
+
+
+def test_connection_config_with_zero_pool_size() -> None:
+    """Test connection_config with zero pool size parameters."""
+    config = SqliteConfig(connection_config={"database": ":memory:", "pool_min_size": 0, "pool_max_size": 0})
+
+    assert config.connection_config["pool_min_size"] == 0
+    assert config.connection_config["pool_max_size"] == 0
+
+
+def test_connection_config_with_negative_pool_size() -> None:
+    """Test connection_config with negative pool size parameters."""
+    config = DuckDBConfig(connection_config={"database": ":memory:", "pool_min_size": -1, "pool_max_size": -1})
+
+    # Negative values are stored but pool creation may validate them
+    assert config.connection_config["pool_min_size"] == -1
+    assert config.connection_config["pool_max_size"] == -1
+
+
+def test_connection_config_with_very_large_pool_size() -> None:
+    """Test connection_config with very large pool size values."""
+    config = AsyncpgConfig(
+        connection_config={"dsn": "postgresql://localhost/test", "min_size": 1000, "max_size": 10000}
+    )
+
+    assert config.connection_config["min_size"] == 1000
+    assert config.connection_config["max_size"] == 10000
+
+
+def test_connection_config_with_min_greater_than_max() -> None:
+    """Test connection_config with min_size > max_size (invalid but stored)."""
+    config = SqliteConfig(connection_config={"database": ":memory:", "pool_min_size": 10, "pool_max_size": 5})
+
+    # Config stores values, validation happens at pool creation
+    assert config.connection_config["pool_min_size"] == 10
+    assert config.connection_config["pool_max_size"] == 5
+
+
+def test_connection_config_with_special_characters_in_strings() -> None:
+    """Test connection_config with special characters in string values."""
+    config = AsyncpgConfig(
+        connection_config={
+            "dsn": "postgresql://user:p@ss!w0rd#$%@localhost/test?sslmode=require",
+            "server_settings": {"application_name": "app with spaces & symbols!"},
+        }
+    )
+
+    assert "p@ss!w0rd#$%" in config.connection_config["dsn"]
+    assert config.connection_config["server_settings"]["application_name"] == "app with spaces & symbols!"
+
+
+def test_connection_config_with_unicode_strings() -> None:
+    """Test connection_config with unicode characters."""
+    config = AsyncpgConfig(
+        connection_config={
+            "dsn": "postgresql://localhost/test",
+            "server_settings": {"application_name": "テスト アプリ"},
+        }
+    )
+
+    assert config.connection_config["server_settings"]["application_name"] == "テスト アプリ"
+
+
+def test_connection_config_with_empty_strings() -> None:
+    """Test connection_config with empty string values."""
+    config = AsyncpgConfig(connection_config={"dsn": "", "user": "", "password": ""})
+
+    assert config.connection_config["dsn"] == ""
+    assert config.connection_config["user"] == ""
+    assert config.connection_config["password"] == ""
+
+
+def test_connection_config_with_none_values_in_dict() -> None:
+    """Test connection_config with None values for keys."""
+    config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test", "user": None, "password": None})
+
+    assert config.connection_config["dsn"] == "postgresql://localhost/test"
+    assert config.connection_config["user"] is None
+    assert config.connection_config["password"] is None
+
+
+def test_connection_config_with_boolean_false_values() -> None:
+    """Test connection_config with False boolean values."""
+    config = SqliteConfig(
+        connection_config={"database": ":memory:", "check_same_thread": False, "cached_statements": 0}
+    )
+
+    assert config.connection_config["check_same_thread"] is False
+    assert config.connection_config["cached_statements"] == 0
+
+
+def test_connection_config_with_mixed_types() -> None:
+    """Test connection_config with various Python types."""
+    config = AsyncpgConfig(
+        connection_config={
+            "dsn": "postgresql://localhost/test",
+            "min_size": 5,
+            "timeout": 30.5,
+            "ssl": True,
+            "server_settings": {"key": "value"},
+            "record_class": None,
+        }
+    )
+
+    assert isinstance(config.connection_config["dsn"], str)
+    assert isinstance(config.connection_config["min_size"], int)
+    assert isinstance(config.connection_config["timeout"], float)
+    assert isinstance(config.connection_config["ssl"], bool)
+    assert isinstance(config.connection_config["server_settings"], dict)
+    assert config.connection_config["record_class"] is None
+
+
+def test_connection_config_modification_after_creation() -> None:
+    """Test that connection_config can be modified after config creation."""
+    config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test"})
+
+    # Modify existing key
+    config.connection_config["dsn"] = "postgresql://localhost/test2"
+    assert config.connection_config["dsn"] == "postgresql://localhost/test2"
+
+    # Add new key
+    config.connection_config["min_size"] = 5
+    assert config.connection_config["min_size"] == 5
+
+    # Delete key
+    del config.connection_config["min_size"]
+    assert "min_size" not in config.connection_config
+
+
+def test_connection_config_clear_after_creation() -> None:
+    """Test that connection_config can be cleared after creation."""
+    config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test", "min_size": 5})
+
+    config.connection_config.clear()
+
+    assert config.connection_config == {}
+
+
+def test_connection_config_update_method() -> None:
+    """Test that connection_config supports dict update() method."""
+    config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test"})
+
+    config.connection_config.update({"min_size": 5, "max_size": 10})
+
+    assert config.connection_config["min_size"] == 5
+    assert config.connection_config["max_size"] == 10
+
+
+def test_connection_config_with_deeply_nested_dicts() -> None:
+    """Test connection_config with deeply nested dict structures."""
+    config = AsyncpgConfig(
+        connection_config={
+            "dsn": "postgresql://localhost/test",
+            "server_settings": {"level1": {"level2": {"level3": {"key": "value"}}}},
+        }
+    )
+
+    assert config.connection_config["server_settings"]["level1"]["level2"]["level3"]["key"] == "value"
+
+
+def test_connection_config_with_list_values() -> None:
+    """Test connection_config with list values."""
+    config = AsyncpgConfig(
+        connection_config={
+            "dsn": "postgresql://localhost/test",
+            "server_settings": {"extensions": ["pg_trgm", "pgcrypto", "uuid-ossp"]},
+        }
+    )
+
+    assert config.connection_config["server_settings"]["extensions"] == ["pg_trgm", "pgcrypto", "uuid-ossp"]
+
+
+def test_connection_config_with_tuple_values() -> None:
+    """Test connection_config with tuple values."""
+    config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test", "ssl": ("require", "verify-ca")})
+
+    assert config.connection_config["ssl"] == ("require", "verify-ca")
+
+
+def test_connection_instance_set_to_arbitrary_object() -> None:
+    """Test that connection_instance can be set to any object (no type checking)."""
+
+    class FakePool:
+        pass
+
+    fake_pool = FakePool()
+    config = DuckDBConfig(connection_config={"database": ":memory:"}, connection_instance=fake_pool)  # type: ignore[arg-type]
+
+    assert config.connection_instance is fake_pool  # type: ignore[comparison-overlap]
+
+
+def test_connection_instance_can_be_modified_after_creation() -> None:
+    """Test that connection_instance can be modified after config creation."""
+    from unittest.mock import MagicMock
+
+    config = DuckDBConfig(connection_config={"database": ":memory:"})
+    assert config.connection_instance is None
+
+    mock_pool = MagicMock()
+    config.connection_instance = mock_pool
+
+    assert config.connection_instance is mock_pool
+
+
+def test_multiple_configs_do_not_share_connection_config() -> None:
+    """Test that modifying one config's connection_config doesn't affect another."""
+    config1 = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/db1"})
+    config2 = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/db2"})
+
+    # Modify config1
+    config1.connection_config["min_size"] = 5
+
+    # config2 should not be affected
+    assert "min_size" not in config2.connection_config
+    assert config2.connection_config["dsn"] == "postgresql://localhost/db2"
+
+
+def test_connection_config_dict_reference_semantics() -> None:
+    """Document whether connection_config keeps a reference to the input dict or a copy."""
+    test_dict = {"dsn": "postgresql://localhost/test"}
+    config = AsyncpgConfig(connection_config=test_dict)
+
+    # Mutate the caller's dict after construction. Whether the mutation is visible
+    # on the config depends on whether the constructor stored the dict by reference
+    # or copied/processed it, so this test only pins down the observable behavior.
+    test_dict["min_size"] = 5  # pyright: ignore[reportArgumentType]
+
+    if "min_size" in config.connection_config:
+        # Reference semantics: the mutation is visible through the config.
+        assert config.connection_config["min_size"] == 5
+    # Copy semantics: min_size never appears in config.connection_config.
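+
+
+# A minimal sketch (illustrative only, not SQLSpec code) of the two storage
+# strategies the test above distinguishes:
+#
+#     class RefConfig:                                   # reference semantics
+#         def __init__(self, connection_config):
+#             self.connection_config = connection_config
+#
+#     class CopyConfig:                                  # copy semantics
+#         def __init__(self, connection_config):
+#             self.connection_config = dict(connection_config)
+#
+# With RefConfig the caller's later mutations show through; with CopyConfig
+# they do not.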
config.connection_config["init"]() == "initialized" + + +def test_connection_config_with_very_long_strings() -> None: + """Test connection_config with very long string values.""" + long_string = "x" * 10000 + config = AsyncpgConfig(connection_config={"dsn": f"postgresql://localhost/{long_string}"}) + + assert len(config.connection_config["dsn"]) > 10000 + + +def test_connection_config_key_with_reserved_python_keywords() -> None: + """Test connection_config keys that are Python reserved words.""" + # Note: This is valid in dict keys even if they're reserved words + config = AsyncpgConfig( + connection_config={ + "dsn": "postgresql://localhost/test", + "class": "value", # reserved word + "def": "value", # reserved word + "return": "value", # reserved word + } + ) + + assert config.connection_config["class"] == "value" + assert config.connection_config["def"] == "value" + assert config.connection_config["return"] == "value" + + +def test_connection_config_numeric_keys() -> None: + """Test connection_config with numeric keys (valid dict keys).""" + config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test", 1: "value", 2.5: "value"}) # type: ignore[dict-item,misc] + + assert config.connection_config[1] == "value" # type: ignore[index] + assert config.connection_config[2.5] == "value" # type: ignore[index] + + +def test_connection_instance_remains_after_connection_config_change() -> None: + """Test that connection_instance persists when connection_config is modified.""" + from unittest.mock import MagicMock + + mock_pool = MagicMock() + config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test"}, connection_instance=mock_pool) + + # Modify connection_config + config.connection_config["min_size"] = 5 + + # connection_instance should remain unchanged + assert config.connection_instance is mock_pool + + +def test_connection_config_with_bytes_values() -> None: + """Test connection_config with bytes values.""" + config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test", "ssl_cert": b"certificate data"}) + + assert config.connection_config["ssl_cert"] == b"certificate data" + + +@pytest.mark.asyncio +async def test_aiosqlite_connection_config_with_pathlib_path() -> None: + """Test that connection_config accepts pathlib.Path objects.""" + from pathlib import Path + + db_path = Path(":memory:") + config = AiosqliteConfig(connection_config={"database": db_path}) + + assert config.connection_config["database"] == db_path + + +def test_connection_config_setdefault_method() -> None: + """Test that connection_config supports dict setdefault() method.""" + config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test"}) + + result = config.connection_config.setdefault("min_size", 5) + + assert result == 5 + assert config.connection_config["min_size"] == 5 + + # setdefault on existing key should return existing value + result = config.connection_config.setdefault("dsn", "other_dsn") + assert result == "postgresql://localhost/test" + + +def test_connection_config_get_method_with_default() -> None: + """Test that connection_config supports dict get() method.""" + config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test"}) + + assert config.connection_config.get("min_size", 5) == 5 + assert config.connection_config.get("dsn") == "postgresql://localhost/test" + + +def test_connection_config_pop_method() -> None: + """Test that connection_config supports dict pop() method.""" + config = 
AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test", "min_size": 5}) + + popped_value = config.connection_config.pop("min_size") + + assert popped_value == 5 + assert "min_size" not in config.connection_config + + +def test_connection_config_items_method() -> None: + """Test that connection_config supports dict items() iteration.""" + config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test", "min_size": 5, "max_size": 10}) + + items = list(config.connection_config.items()) + + assert len(items) == 3 + assert ("dsn", "postgresql://localhost/test") in items + assert ("min_size", 5) in items + assert ("max_size", 10) in items + + +def test_connection_config_keys_method() -> None: + """Test that connection_config supports dict keys() method.""" + config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test", "min_size": 5}) + + keys = list(config.connection_config.keys()) + + assert "dsn" in keys + assert "min_size" in keys + assert len(keys) == 2 + + +def test_connection_config_values_method() -> None: + """Test that connection_config supports dict values() method.""" + config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test", "min_size": 5}) + + values = list(config.connection_config.values()) + + assert "postgresql://localhost/test" in values + assert 5 in values + assert len(values) == 2 + + +def test_connection_config_in_operator() -> None: + """Test that connection_config supports 'in' operator.""" + config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test"}) + + assert "dsn" in config.connection_config + assert "min_size" not in config.connection_config + + +def test_connection_config_len_function() -> None: + """Test that connection_config supports len() function.""" + config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test", "min_size": 5}) + + assert len(config.connection_config) == 2 + + +def test_connection_config_bool_evaluation() -> None: + """Test that connection_config evaluates to bool correctly.""" + # Use AsyncpgConfig which doesn't add default values + config_empty = AsyncpgConfig(connection_config={}) + config_with_data = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test"}) + + assert not bool(config_empty.connection_config) + assert bool(config_with_data.connection_config) + + +def test_connection_config_copy_method() -> None: + """Test that connection_config supports dict copy() method.""" + config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test", "min_size": 5}) + + config_copy = config.connection_config.copy() + + assert config_copy == config.connection_config + assert config_copy is not config.connection_config # Should be a shallow copy + + # Modifying copy should not affect original + config_copy["max_size"] = 10 + assert "max_size" not in config.connection_config diff --git a/tests/unit/test_config/test_connection_config_parameters.py b/tests/unit/test_config/test_connection_config_parameters.py new file mode 100644 index 000000000..4082e7e11 --- /dev/null +++ b/tests/unit/test_config/test_connection_config_parameters.py @@ -0,0 +1,544 @@ +"""Unit tests for connection_config and connection_instance parameters. 
diff --git a/tests/unit/test_config/test_connection_config_parameters.py b/tests/unit/test_config/test_connection_config_parameters.py
new file mode 100644
index 000000000..4082e7e11
--- /dev/null
+++ b/tests/unit/test_config/test_connection_config_parameters.py
@@ -0,0 +1,544 @@
+"""Unit tests for connection_config and connection_instance parameters.
+
+Tests the standardized parameter naming across all database adapters:
+- connection_config (dict for connection/pool settings)
+- connection_instance (pre-created pool/connection instance)
+
+This test suite validates the refactoring from pool_config → connection_config
+and pool_instance → connection_instance across all 11 adapters.
+
+Key aspects tested:
+1. Parameter acceptance and storage
+2. Default empty dict for connection_config
+3. None handling for connection_instance
+4. Type validation
+5. Configuration merging and overrides
+"""
+
+import pytest
+
+from sqlspec.adapters.adbc.config import AdbcConfig
+from sqlspec.adapters.aiosqlite.config import AiosqliteConfig
+from sqlspec.adapters.asyncmy.config import AsyncmyConfig
+from sqlspec.adapters.asyncpg.config import AsyncpgConfig
+from sqlspec.adapters.bigquery.config import BigQueryConfig
+from sqlspec.adapters.duckdb.config import DuckDBConfig
+from sqlspec.adapters.oracledb.config import OracleAsyncConfig, OracleSyncConfig
+from sqlspec.adapters.psqlpy.config import PsqlpyConfig
+from sqlspec.adapters.psycopg.config import PsycopgAsyncConfig, PsycopgSyncConfig
+from sqlspec.adapters.spanner.config import SpannerSyncConfig
+from sqlspec.adapters.sqlite.config import SqliteConfig
+
+
+def test_connection_config_parameter_accepts_dict() -> None:
+    """Test that connection_config parameter accepts dict values."""
+    config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test", "min_size": 5})
+
+    assert config.connection_config["dsn"] == "postgresql://localhost/test"
+    assert config.connection_config["min_size"] == 5
+
+
+def test_connection_config_defaults_to_empty_dict() -> None:
+    """Test that connection_config defaults to empty dict when not provided."""
+    # Use AsyncpgConfig which doesn't modify connection_config
+    config = AsyncpgConfig()
+
+    assert config.connection_config == {}
+    assert isinstance(config.connection_config, dict)
+
+
+def test_connection_config_accepts_none_and_converts_to_empty_dict() -> None:
+    """Test that connection_config=None is converted to empty dict."""
+    # Use AsyncpgConfig which doesn't modify connection_config
+    config = AsyncpgConfig(connection_config=None)
+
+    assert config.connection_config == {}
+    assert isinstance(config.connection_config, dict)
+
+
+def test_connection_instance_defaults_to_none() -> None:
+    """Test that connection_instance defaults to None when not provided."""
+    config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test"})
+
+    assert config.connection_instance is None
+
+
+def test_connection_instance_accepts_none_explicitly() -> None:
+    """Test that connection_instance=None is explicitly accepted."""
+    config = PsycopgAsyncConfig(connection_config={"conninfo": "postgresql://localhost/test"}, connection_instance=None)
+
+    assert config.connection_instance is None
+
+
+def test_connection_config_stored_in_base_class() -> None:
+    """Test that connection_config is stored in the base class attribute."""
+    config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test"})
+
+    assert hasattr(config, "connection_config")
+    assert config.connection_config["dsn"] == "postgresql://localhost/test"
+
+
+def test_connection_instance_stored_in_base_class() -> None:
+    """Test that connection_instance is stored in the base class attribute."""
+    config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test"})
+
+    assert hasattr(config, "connection_instance")
+    assert config.connection_instance is None
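+
+
+# Illustrative summary of the base-class contract the tests above assume
+# (a sketch, not the actual SQLSpec base class):
+#
+#     config.connection_config    # always a dict; None is normalized to {}
+#     config.connection_instance  # optional pre-created pool/client; defaults to None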
+
+
+def test_asyncpg_config_accepts_connection_parameters() -> None:
+    """Test AsyncpgConfig accepts connection_config and connection_instance."""
+    config = AsyncpgConfig(
+        connection_config={"dsn": "postgresql://localhost/test", "min_size": 5, "max_size": 10, "timeout": 30.0},
+        connection_instance=None,
+    )
+
+    assert config.connection_config["dsn"] == "postgresql://localhost/test"
+    assert config.connection_config["min_size"] == 5
+    assert config.connection_config["max_size"] == 10
+    assert config.connection_config["timeout"] == 30.0
+    assert config.connection_instance is None
+
+
+def test_psycopg_async_config_accepts_connection_parameters() -> None:
+    """Test PsycopgAsyncConfig accepts connection_config and connection_instance."""
+    config = PsycopgAsyncConfig(
+        connection_config={"conninfo": "postgresql://localhost/test", "min_size": 2, "max_size": 20},
+        connection_instance=None,
+    )
+
+    assert config.connection_config["conninfo"] == "postgresql://localhost/test"
+    assert config.connection_config["min_size"] == 2
+    assert config.connection_config["max_size"] == 20
+    assert config.connection_instance is None
+
+
+def test_psycopg_sync_config_accepts_connection_parameters() -> None:
+    """Test PsycopgSyncConfig accepts connection_config and connection_instance."""
+    config = PsycopgSyncConfig(
+        connection_config={"conninfo": "postgresql://localhost/test", "min_size": 1, "max_size": 5},
+        connection_instance=None,
+    )
+
+    assert config.connection_config["conninfo"] == "postgresql://localhost/test"
+    assert config.connection_config["min_size"] == 1
+    assert config.connection_config["max_size"] == 5
+    assert config.connection_instance is None
+
+
+def test_asyncmy_config_accepts_connection_parameters() -> None:
+    """Test AsyncmyConfig accepts connection_config and connection_instance."""
+    config = AsyncmyConfig(
+        connection_config={
+            "host": "localhost",
+            "port": 3306,
+            "user": "root",
+            "password": "password",
+            "database": "test",
+            "minsize": 1,
+            "maxsize": 10,
+        },
+        connection_instance=None,
+    )
+
+    assert config.connection_config["host"] == "localhost"
+    assert config.connection_config["port"] == 3306
+    assert config.connection_config["database"] == "test"
+    assert config.connection_config["minsize"] == 1
+    assert config.connection_config["maxsize"] == 10
+    assert config.connection_instance is None
+
+
+def test_psqlpy_config_accepts_connection_parameters() -> None:
+    """Test PsqlpyConfig accepts connection_config and connection_instance."""
+    config = PsqlpyConfig(
+        connection_config={"dsn": "postgresql://localhost/test", "max_db_pool_size": 10}, connection_instance=None
+    )
+
+    assert config.connection_config["dsn"] == "postgresql://localhost/test"
+    assert config.connection_config["max_db_pool_size"] == 10
+    assert config.connection_instance is None
+
+
+def test_oracle_async_config_accepts_connection_parameters() -> None:
+    """Test OracleAsyncConfig accepts connection_config and connection_instance."""
+    config = OracleAsyncConfig(
+        connection_config={
+            "user": "system",
+            "password": "password",
+            "dsn": "localhost:1521/ORCLPDB1",
+            "min": 1,
+            "max": 5,
+        },
+        connection_instance=None,
+    )
+
+    assert config.connection_config["user"] == "system"
+    assert config.connection_config["dsn"] == "localhost:1521/ORCLPDB1"
+    assert config.connection_config["min"] == 1
+    assert config.connection_config["max"] == 5
+    assert config.connection_instance is None
+
+
+def test_oracle_sync_config_accepts_connection_parameters() -> None:
+    """Test OracleSyncConfig accepts connection_config and connection_instance."""
+    config = OracleSyncConfig(
+        connection_config={
+            "user": "system",
+            "password": "password",
+            "dsn": "localhost:1521/ORCLPDB1",
+            "min": 2,
+            "max": 10,
+        },
+        connection_instance=None,
+    )
+
+    assert config.connection_config["user"] == "system"
+    assert config.connection_config["dsn"] == "localhost:1521/ORCLPDB1"
+    assert config.connection_config["min"] == 2
+    assert config.connection_config["max"] == 10
+    assert config.connection_instance is None
+
+
+def test_sqlite_config_accepts_connection_parameters() -> None:
+    """Test SqliteConfig accepts connection_config and connection_instance."""
+    config = SqliteConfig(
+        connection_config={"database": ":memory:", "check_same_thread": False, "pool_min_size": 5, "pool_max_size": 10},
+        connection_instance=None,
+    )
+
+    # SQLite converts :memory: to shared memory URI for pooling
+    assert "memory" in config.connection_config["database"]
+    assert config.connection_config["check_same_thread"] is False
+    assert config.connection_config["pool_min_size"] == 5
+    assert config.connection_config["pool_max_size"] == 10
+    assert config.connection_instance is None
+
+
+def test_aiosqlite_config_accepts_connection_parameters() -> None:
+    """Test AiosqliteConfig accepts connection_config and connection_instance."""
+    config = AiosqliteConfig(
+        connection_config={"database": ":memory:", "timeout": 10.0, "pool_min_size": 2, "pool_max_size": 8},
+        connection_instance=None,
+    )
+
+    # AioSQLite converts :memory: to shared memory URI for pooling
+    assert "memory" in config.connection_config["database"]
+    assert config.connection_config["timeout"] == 10.0
+    assert config.connection_config["pool_min_size"] == 2
+    assert config.connection_config["pool_max_size"] == 8
+    assert config.connection_instance is None
+
+
+def test_duckdb_config_accepts_connection_parameters() -> None:
+    """Test DuckDBConfig accepts connection_config and connection_instance."""
+    config = DuckDBConfig(
+        connection_config={"database": ":memory:", "read_only": False, "pool_min_size": 3, "pool_max_size": 12},
+        connection_instance=None,
+    )
+
+    # DuckDB converts :memory: to :memory:shared_db for pooling
+    assert "memory" in config.connection_config["database"]
+    assert config.connection_config["read_only"] is False
+    assert config.connection_config["pool_min_size"] == 3
+    assert config.connection_config["pool_max_size"] == 12
+    assert config.connection_instance is None
+
+
+def test_bigquery_config_accepts_connection_parameters() -> None:
+    """Test BigQueryConfig accepts connection_config and connection_instance."""
+    config = BigQueryConfig(
+        connection_config={"project": "my-project", "dataset_id": "my_dataset", "location": "US"},
+        connection_instance=None,
+    )
+
+    assert config.connection_config["project"] == "my-project"
+    assert config.connection_config["dataset_id"] == "my_dataset"
+    assert config.connection_config["location"] == "US"
+    assert config.connection_instance is None
+
+
+def test_adbc_config_accepts_connection_parameters() -> None:
+    """Test AdbcConfig accepts connection_config and connection_instance."""
+    config = AdbcConfig(
+        connection_config={"driver": "adbc_driver_postgresql", "uri": "postgresql://localhost/test"},
+        connection_instance=None,
+    )
+
+    assert config.connection_config["driver"] == "adbc_driver_postgresql"
+    assert config.connection_config["uri"] == "postgresql://localhost/test"
+    assert config.connection_instance is None
+
+
+def test_spanner_config_accepts_connection_parameters() -> None:
+    """Test SpannerSyncConfig accepts connection_config and connection_instance."""
+    config = SpannerSyncConfig(
+        connection_config={"instance_id": "test-instance", "database_id": "test-database"}, connection_instance=None
+    )
+
+    assert config.connection_config["instance_id"] == "test-instance"
+    assert config.connection_config["database_id"] == "test-database"
+    assert config.connection_instance is None
+
+
+def test_connection_config_empty_dict_is_valid() -> None:
+    """Test that empty connection_config dict is valid for adapters."""
+    # Use AsyncpgConfig which doesn't add defaults
+    config = AsyncpgConfig(connection_config={})
+
+    assert config.connection_config == {}
+    assert isinstance(config.connection_config, dict)
+
+
+def test_connection_config_can_be_modified_after_creation() -> None:
+    """Test that connection_config dict can be modified after config creation."""
+    config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test"})
+
+    config.connection_config["min_size"] = 5
+    config.connection_config["max_size"] = 10
+
+    assert config.connection_config["min_size"] == 5
+    assert config.connection_config["max_size"] == 10
+
+
+def test_connection_config_preserves_all_keys() -> None:
+    """Test that connection_config preserves all provided keys."""
+    test_config = {
+        "dsn": "postgresql://localhost/test",
+        "min_size": 5,
+        "max_size": 10,
+        "timeout": 30.0,
+        "command_timeout": 60.0,
+        "server_settings": {"application_name": "sqlspec"},
+    }
+    config = AsyncpgConfig(connection_config=test_config)
+
+    for key, value in test_config.items():
+        assert config.connection_config[key] == value
+
+
+def test_multiple_configs_have_independent_connection_configs() -> None:
+    """Test that multiple config instances have independent connection_config dicts."""
+    config1 = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/db1"})
+    config2 = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/db2"})
+
+    assert config1.connection_config["dsn"] == "postgresql://localhost/db1"
+    assert config2.connection_config["dsn"] == "postgresql://localhost/db2"
+
+    # Modifying config1 should not affect config2
+    config1.connection_config["min_size"] = 5
+    assert "min_size" not in config2.connection_config
+
+
+def test_connection_config_with_nested_dicts() -> None:
+    """Test that connection_config handles nested dict values."""
+    config = AsyncpgConfig(
+        connection_config={
+            "dsn": "postgresql://localhost/test",
+            "server_settings": {"application_name": "sqlspec", "timezone": "UTC"},
+        }
+    )
+
+    assert config.connection_config["server_settings"]["application_name"] == "sqlspec"
+    assert config.connection_config["server_settings"]["timezone"] == "UTC"
+
+
+def test_connection_config_with_various_value_types() -> None:
+    """Test that connection_config handles various value types."""
+    config = AsyncpgConfig(
+        connection_config={
+            "dsn": "postgresql://localhost/test",  # str
+            "min_size": 5,  # int
+            "timeout": 30.5,  # float
+            "ssl": True,  # bool
+            "server_settings": {"key": "value"},  # dict
+        }
+    )
+
+    assert isinstance(config.connection_config["dsn"], str)
+    assert isinstance(config.connection_config["min_size"], int)
+    assert isinstance(config.connection_config["timeout"], float)
+    assert isinstance(config.connection_config["ssl"], bool)
+    assert isinstance(config.connection_config["server_settings"], dict)
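+
+
+# The pool_min_size / pool_max_size / pool_pre_ping / pool_recycle keys used in
+# the next three tests are assumed here to be SQLSpec's pool tuning knobs for the
+# embedded engines (SQLite, aiosqlite, DuckDB); note that they travel inside
+# connection_config rather than as separate constructor arguments.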
+ "database": ":memory:", + "pool_min_size": 5, + "pool_max_size": 10, + "pool_pre_ping": True, + "pool_recycle": 3600, + } + ) + + assert config.connection_config["pool_min_size"] == 5 + assert config.connection_config["pool_max_size"] == 10 + assert config.connection_config["pool_pre_ping"] is True + assert config.connection_config["pool_recycle"] == 3600 + + +def test_aiosqlite_custom_pool_parameters_in_connection_config() -> None: + """Test that AioSQLite custom pool parameters work in connection_config.""" + config = AiosqliteConfig( + connection_config={ + "database": ":memory:", + "pool_min_size": 2, + "pool_max_size": 8, + "pool_pre_ping": False, + "pool_recycle": 7200, + } + ) + + assert config.connection_config["pool_min_size"] == 2 + assert config.connection_config["pool_max_size"] == 8 + assert config.connection_config["pool_pre_ping"] is False + assert config.connection_config["pool_recycle"] == 7200 + + +def test_duckdb_custom_pool_parameters_in_connection_config() -> None: + """Test that DuckDB custom pool parameters work in connection_config.""" + config = DuckDBConfig( + connection_config={"database": ":memory:", "pool_min_size": 3, "pool_max_size": 12, "pool_pre_ping": True} + ) + + assert config.connection_config["pool_min_size"] == 3 + assert config.connection_config["pool_max_size"] == 12 + assert config.connection_config["pool_pre_ping"] is True + + +def test_connection_config_parameter_naming_consistency() -> None: + """Test that all adapters use consistent connection_config parameter name.""" + adapters = [ + (AsyncpgConfig, {"dsn": "postgresql://localhost/test"}, None), + (PsycopgAsyncConfig, {"conninfo": "postgresql://localhost/test"}, None), + (PsycopgSyncConfig, {"conninfo": "postgresql://localhost/test"}, None), + (AsyncmyConfig, {"host": "localhost", "database": "test"}, "port"), # Adds default port + (PsqlpyConfig, {"dsn": "postgresql://localhost/test"}, None), + (OracleAsyncConfig, {"user": "system", "password": "pwd", "dsn": "localhost/XE"}, None), + (OracleSyncConfig, {"user": "system", "password": "pwd", "dsn": "localhost/XE"}, None), + (SqliteConfig, {"database": ":memory:"}, "database"), # Converts :memory: to URI + (AiosqliteConfig, {"database": ":memory:"}, "database"), # Converts :memory: to URI + (DuckDBConfig, {"database": ":memory:"}, "database"), # Converts :memory: to shared_db + (BigQueryConfig, {"project": "test-project"}, None), + (AdbcConfig, {"driver": "adbc_driver_sqlite"}, None), + (SpannerSyncConfig, {"instance_id": "test", "database_id": "test"}, None), + ] + + for adapter_class, config_dict, modified_key in adapters: + config = adapter_class(connection_config=config_dict) + assert hasattr(config, "connection_config") + assert hasattr(config, "connection_instance") + + # Check that original keys are present (may be modified or have defaults added) + for key in config_dict: + if key != modified_key: + assert key in config.connection_config + + assert config.connection_instance is None + + +def test_connection_instance_parameter_naming_consistency() -> None: + """Test that all adapters use consistent connection_instance parameter name.""" + adapters = [ + (AsyncpgConfig, {"dsn": "postgresql://localhost/test"}), + (PsycopgAsyncConfig, {"conninfo": "postgresql://localhost/test"}), + (PsycopgSyncConfig, {"conninfo": "postgresql://localhost/test"}), + (AsyncmyConfig, {"host": "localhost", "database": "test"}), + (PsqlpyConfig, {"dsn": "postgresql://localhost/test"}), + (OracleAsyncConfig, {"user": "system", "password": "pwd", "dsn": 
"localhost/XE"}), + (OracleSyncConfig, {"user": "system", "password": "pwd", "dsn": "localhost/XE"}), + (SqliteConfig, {"database": ":memory:"}), + (AiosqliteConfig, {"database": ":memory:"}), + (DuckDBConfig, {"database": ":memory:"}), + (BigQueryConfig, {"project": "test-project"}), + (AdbcConfig, {"driver": "adbc_driver_sqlite"}), + (SpannerSyncConfig, {"instance_id": "test", "database_id": "test"}), + ] + + for adapter_class, config_dict in adapters: + config = adapter_class(connection_config=config_dict, connection_instance=None) + assert hasattr(config, "connection_instance") + assert config.connection_instance is None + + +@pytest.mark.asyncio +async def test_asyncpg_config_with_pre_created_pool() -> None: + """Test AsyncpgConfig with connection_instance set to pre-created pool.""" + from unittest.mock import AsyncMock, MagicMock + + # Create a mock pool + mock_pool = MagicMock() + mock_pool.acquire = AsyncMock() + + config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test"}, connection_instance=mock_pool) + + assert config.connection_instance is mock_pool + assert config.connection_config["dsn"] == "postgresql://localhost/test" + + +def test_sqlite_config_with_pre_created_pool() -> None: + """Test SqliteConfig with connection_instance set to pre-created pool.""" + from unittest.mock import MagicMock + + # Create a mock pool + mock_pool = MagicMock() + + config = SqliteConfig(connection_config={"database": ":memory:"}, connection_instance=mock_pool) + + assert config.connection_instance is mock_pool + # SQLite converts :memory: to shared memory URI for pooling + assert "memory" in config.connection_config["database"] + + +def test_duckdb_config_with_pre_created_pool() -> None: + """Test DuckDBConfig with connection_instance set to pre-created pool.""" + from unittest.mock import MagicMock + + # Create a mock pool + mock_pool = MagicMock() + + config = DuckDBConfig(connection_config={"database": ":memory:"}, connection_instance=mock_pool) + + assert config.connection_instance is mock_pool + # DuckDB converts :memory: to :memory:shared_db for pooling + assert "memory" in config.connection_config["database"] + + +def test_bigquery_config_with_pre_created_client() -> None: + """Test BigQueryConfig with connection_instance set to pre-created client.""" + from unittest.mock import MagicMock + + # Create a mock client + mock_client = MagicMock() + + config = BigQueryConfig(connection_config={"project": "test-project"}, connection_instance=mock_client) + + assert config.connection_instance is mock_client + assert config.connection_config["project"] == "test-project" + + +def test_connection_instance_bypasses_pool_creation() -> None: + """Test that providing connection_instance bypasses pool creation logic.""" + from unittest.mock import MagicMock + + mock_pool = MagicMock() + + config = DuckDBConfig(connection_config={"database": ":memory:"}, connection_instance=mock_pool) + + # When connection_instance is set, _create_pool should not be called + # and provide_pool should return the provided instance + pool = config.provide_pool() + + assert pool is mock_pool + + +def test_connection_config_does_not_accept_invalid_types() -> None: + """Test that connection_config validates type at runtime (if validation exists).""" + # Note: SQLSpec uses TypedDict, so type validation happens at type-check time + # At runtime, we just ensure dict assignment works + config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test"}) + + assert 
+
+
+def test_connection_config_does_not_accept_invalid_types() -> None:
+    """Test that connection_config validates type at runtime (if validation exists)."""
+    # Note: SQLSpec uses TypedDict, so type validation happens at type-check time
+    # At runtime, we just ensure dict assignment works
+    config = AsyncpgConfig(connection_config={"dsn": "postgresql://localhost/test"})
+
+    assert isinstance(config.connection_config, dict)
diff --git a/tests/unit/test_config/test_migration_methods.py b/tests/unit/test_config/test_migration_methods.py
index 773cf5c4c..ef076c26d 100644
--- a/tests/unit/test_config/test_migration_methods.py
+++ b/tests/unit/test_config/test_migration_methods.py
@@ -78,7 +78,9 @@ def test_sqlite_config_migrate_up_calls_commands(tmp_path: Path) -> None:
     migration_dir = tmp_path / "migrations"
     temp_db = str(tmp_path / "test.db")

-    config = SqliteConfig(pool_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)})
+    config = SqliteConfig(
+        connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)}
+    )

     with patch.object(SyncMigrationCommands, "upgrade", return_value=None) as mock_upgrade:
         config.migrate_up(revision="head", allow_missing=True, auto_sync=False, dry_run=True)
@@ -91,7 +93,9 @@ def test_sqlite_config_migrate_down_calls_commands(tmp_path: Path) -> None:
     migration_dir = tmp_path / "migrations"
     temp_db = str(tmp_path / "test.db")

-    config = SqliteConfig(pool_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)})
+    config = SqliteConfig(
+        connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)}
+    )

     with patch.object(SyncMigrationCommands, "downgrade", return_value=None) as mock_downgrade:
         config.migrate_down(revision="-2", dry_run=True)
@@ -104,7 +108,9 @@ def test_sqlite_config_get_current_migration_calls_commands(tmp_path: Path) -> N
     migration_dir = tmp_path / "migrations"
     temp_db = str(tmp_path / "test.db")

-    config = SqliteConfig(pool_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)})
+    config = SqliteConfig(
+        connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)}
+    )

     with patch.object(SyncMigrationCommands, "current", return_value="0001") as mock_current:
         result = config.get_current_migration(verbose=True)
@@ -118,7 +124,9 @@ def test_sqlite_config_create_migration_calls_commands(tmp_path: Path) -> None:
     migration_dir = tmp_path / "migrations"
     temp_db = str(tmp_path / "test.db")

-    config = SqliteConfig(pool_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)})
+    config = SqliteConfig(
+        connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)}
+    )

     with patch.object(SyncMigrationCommands, "revision", return_value=None) as mock_revision:
         config.create_migration(message="test migration", file_type="py")
@@ -131,7 +139,9 @@ def test_sqlite_config_init_migrations_calls_commands(tmp_path: Path) -> None:
     migration_dir = tmp_path / "migrations"
     temp_db = str(tmp_path / "test.db")

-    config = SqliteConfig(pool_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)})
+    config = SqliteConfig(
+        connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)}
+    )

     with patch.object(SyncMigrationCommands, "init", return_value=None) as mock_init:
         config.init_migrations(directory=str(migration_dir), package=False)
@@ -144,7 +154,9 @@ def test_sqlite_config_init_migrations_uses_default_directory(tmp_path: Path) ->
     migration_dir = tmp_path / "migrations"
     temp_db = str(tmp_path / "test.db")

-    config = SqliteConfig(pool_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)})
+    config = SqliteConfig(
+        connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)}
+    )

     with patch.object(SyncMigrationCommands, "init", return_value=None) as mock_init:
         config.init_migrations(package=True)
@@ -157,7 +169,9 @@ def test_sqlite_config_stamp_migration_calls_commands(tmp_path: Path) -> None:
     migration_dir = tmp_path / "migrations"
     temp_db = str(tmp_path / "test.db")

-    config = SqliteConfig(pool_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)})
+    config = SqliteConfig(
+        connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)}
+    )

     with patch.object(SyncMigrationCommands, "stamp", return_value=None) as mock_stamp:
         config.stamp_migration(revision="0001")
@@ -170,7 +184,9 @@ def test_sqlite_config_fix_migrations_calls_commands(tmp_path: Path) -> None:
     migration_dir = tmp_path / "migrations"
     temp_db = str(tmp_path / "test.db")

-    config = SqliteConfig(pool_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)})
+    config = SqliteConfig(
+        connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)}
+    )

     with patch.object(SyncMigrationCommands, "fix", return_value=None) as mock_fix:
         config.fix_migrations(dry_run=True, update_database=False, yes=True)
@@ -184,7 +200,8 @@ async def test_asyncpg_config_migrate_up_calls_commands(tmp_path: Path) -> None:
     migration_dir = tmp_path / "migrations"

     config = AsyncpgConfig(
-        pool_config={"dsn": "postgresql://localhost/test"}, migration_config={"script_location": str(migration_dir)}
+        connection_config={"dsn": "postgresql://localhost/test"},
+        migration_config={"script_location": str(migration_dir)},
     )

     with patch.object(AsyncMigrationCommands, "upgrade", return_value=None) as mock_upgrade:
@@ -199,7 +216,8 @@ async def test_asyncpg_config_migrate_down_calls_commands(tmp_path: Path) -> Non
     migration_dir = tmp_path / "migrations"

     config = AsyncpgConfig(
-        pool_config={"dsn": "postgresql://localhost/test"}, migration_config={"script_location": str(migration_dir)}
+        connection_config={"dsn": "postgresql://localhost/test"},
+        migration_config={"script_location": str(migration_dir)},
     )

     with patch.object(AsyncMigrationCommands, "downgrade", return_value=None) as mock_downgrade:
@@ -214,7 +232,8 @@ async def test_asyncpg_config_get_current_migration_calls_commands(tmp_path: Pat
     migration_dir = tmp_path / "migrations"

     config = AsyncpgConfig(
-        pool_config={"dsn": "postgresql://localhost/test"}, migration_config={"script_location": str(migration_dir)}
+        connection_config={"dsn": "postgresql://localhost/test"},
+        migration_config={"script_location": str(migration_dir)},
    )

     with patch.object(AsyncMigrationCommands, "current", return_value="0002") as mock_current:
@@ -230,7 +249,8 @@ async def test_asyncpg_config_create_migration_calls_commands(tmp_path: Path) ->
     migration_dir = tmp_path / "migrations"

     config = AsyncpgConfig(
-        pool_config={"dsn": "postgresql://localhost/test"}, migration_config={"script_location": str(migration_dir)}
+        connection_config={"dsn": "postgresql://localhost/test"},
+        migration_config={"script_location": str(migration_dir)},
     )

     with patch.object(AsyncMigrationCommands, "revision", return_value=None) as mock_revision:
@@ -245,7 +265,8 @@ async def test_asyncpg_config_init_migrations_calls_commands(tmp_path: Path) ->
     migration_dir = tmp_path / "migrations"

     config = AsyncpgConfig(
-        pool_config={"dsn": "postgresql://localhost/test"}, migration_config={"script_location": str(migration_dir)}
+        connection_config={"dsn": "postgresql://localhost/test"},
+        migration_config={"script_location": str(migration_dir)},
     )

     with patch.object(AsyncMigrationCommands, "init", return_value=None) as mock_init:
@@ -260,7 +281,8 @@ async def test_asyncpg_config_stamp_migration_calls_commands(tmp_path: Path) ->
     migration_dir = tmp_path / "migrations"

     config = AsyncpgConfig(
-        pool_config={"dsn": "postgresql://localhost/test"}, migration_config={"script_location": str(migration_dir)}
+        connection_config={"dsn": "postgresql://localhost/test"},
+        migration_config={"script_location": str(migration_dir)},
     )

     with patch.object(AsyncMigrationCommands, "stamp", return_value=None) as mock_stamp:
@@ -275,7 +297,8 @@ async def test_asyncpg_config_fix_migrations_calls_commands(tmp_path: Path) -> N
     migration_dir = tmp_path / "migrations"

     config = AsyncpgConfig(
-        pool_config={"dsn": "postgresql://localhost/test"}, migration_config={"script_location": str(migration_dir)}
+        connection_config={"dsn": "postgresql://localhost/test"},
+        migration_config={"script_location": str(migration_dir)},
     )

     with patch.object(AsyncMigrationCommands, "fix", return_value=None) as mock_fix:
@@ -289,7 +312,7 @@ def test_duckdb_pooled_config_migrate_up_calls_commands(tmp_path: Path) -> None:
     migration_dir = tmp_path / "migrations"

     config = DuckDBConfig(
-        pool_config={"database": ":memory:"}, migration_config={"script_location": str(migration_dir)}
+        connection_config={"database": ":memory:"}, migration_config={"script_location": str(migration_dir)}
     )

     with patch.object(SyncMigrationCommands, "upgrade", return_value=None) as mock_upgrade:
@@ -303,7 +326,7 @@ def test_duckdb_pooled_config_get_current_migration_calls_commands(tmp_path: Pat
     migration_dir = tmp_path / "migrations"

     config = DuckDBConfig(
-        pool_config={"database": ":memory:"}, migration_config={"script_location": str(migration_dir)}
+        connection_config={"database": ":memory:"}, migration_config={"script_location": str(migration_dir)}
     )

     with patch.object(SyncMigrationCommands, "current", return_value=None) as mock_current:
@@ -320,7 +343,7 @@ async def test_aiosqlite_async_config_migrate_up_calls_commands(tmp_path: Path)
     temp_db = str(tmp_path / "test.db")

     config = AiosqliteConfig(
-        pool_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)}
+        connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)}
     )

     with patch.object(AsyncMigrationCommands, "upgrade", return_value=None) as mock_upgrade:
@@ -334,7 +357,9 @@ def test_migrate_up_default_parameters_sync(tmp_path: Path) -> None:
     migration_dir = tmp_path / "migrations"
     temp_db = str(tmp_path / "test.db")

-    config = SqliteConfig(pool_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)})
+    config = SqliteConfig(
+        connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)}
+    )

     with patch.object(SyncMigrationCommands, "upgrade", return_value=None) as mock_upgrade:
         config.migrate_up()
@@ -348,7 +373,8 @@ async def test_migrate_up_default_parameters_async(tmp_path: Path) -> None:
     migration_dir = tmp_path / "migrations"

     config = AsyncpgConfig(
-        pool_config={"dsn": "postgresql://localhost/test"}, migration_config={"script_location": str(migration_dir)}
+        connection_config={"dsn": "postgresql://localhost/test"},
+        migration_config={"script_location": str(migration_dir)},
     )

     with patch.object(AsyncMigrationCommands, "upgrade", return_value=None) as mock_upgrade:
@@ -362,7 +388,9 @@ def test_migrate_down_default_parameters_sync(tmp_path: Path) -> None:
     migration_dir = tmp_path / "migrations"
     temp_db = str(tmp_path / "test.db")

-    config = SqliteConfig(pool_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)})
+    config = SqliteConfig(
+        connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)}
+    )

     with patch.object(SyncMigrationCommands, "downgrade", return_value=None) as mock_downgrade:
         config.migrate_down()
@@ -376,7 +404,8 @@ async def test_migrate_down_default_parameters_async(tmp_path: Path) -> None:
     migration_dir = tmp_path / "migrations"

     config = AsyncpgConfig(
-        pool_config={"dsn": "postgresql://localhost/test"}, migration_config={"script_location": str(migration_dir)}
+        connection_config={"dsn": "postgresql://localhost/test"},
+        migration_config={"script_location": str(migration_dir)},
     )

     with patch.object(AsyncMigrationCommands, "downgrade", return_value=None) as mock_downgrade:
@@ -390,7 +419,9 @@ def test_create_migration_default_file_type_sync(tmp_path: Path) -> None:
     migration_dir = tmp_path / "migrations"
     temp_db = str(tmp_path / "test.db")

-    config = SqliteConfig(pool_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)})
+    config = SqliteConfig(
+        connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)}
+    )

     with patch.object(SyncMigrationCommands, "revision", return_value=None) as mock_revision:
         config.create_migration(message="test migration")
@@ -404,7 +435,8 @@ async def test_create_migration_default_file_type_async(tmp_path: Path) -> None:
     migration_dir = tmp_path / "migrations"

     config = AsyncpgConfig(
-        pool_config={"dsn": "postgresql://localhost/test"}, migration_config={"script_location": str(migration_dir)}
+        connection_config={"dsn": "postgresql://localhost/test"},
+        migration_config={"script_location": str(migration_dir)},
     )

     with patch.object(AsyncMigrationCommands, "revision", return_value=None) as mock_revision:
@@ -418,7 +450,9 @@ def test_init_migrations_default_package_sync(tmp_path: Path) -> None:
     migration_dir = tmp_path / "migrations"
     temp_db = str(tmp_path / "test.db")

-    config = SqliteConfig(pool_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)})
+    config = SqliteConfig(
+        connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)}
+    )

     with patch.object(SyncMigrationCommands, "init", return_value=None) as mock_init:
         config.init_migrations(directory=str(migration_dir))
@@ -432,7 +466,8 @@ async def test_init_migrations_default_package_async(tmp_path: Path) -> None:
     migration_dir = tmp_path / "migrations"

     config = AsyncpgConfig(
-        pool_config={"dsn": "postgresql://localhost/test"}, migration_config={"script_location": str(migration_dir)}
+        connection_config={"dsn": "postgresql://localhost/test"},
+        migration_config={"script_location": str(migration_dir)},
     )

     with patch.object(AsyncMigrationCommands, "init", return_value=None) as mock_init:
@@ -446,7 +481,9 @@ def test_fix_migrations_default_parameters_sync(tmp_path: Path) -> None:
     migration_dir = tmp_path / "migrations"
     temp_db = str(tmp_path / "test.db")

-    config = SqliteConfig(pool_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)})
+    config = SqliteConfig(
+        connection_config={"database": temp_db}, migration_config={"script_location": str(migration_dir)}
+    )

     with patch.object(SyncMigrationCommands, "fix", return_value=None) as mock_fix:
         config.fix_migrations()
@@ -460,7 +497,8 @@ async def test_fix_migrations_default_parameters_async(tmp_path: Path) -> None:
     migration_dir = tmp_path / "migrations"

     config = AsyncpgConfig(
-        pool_config={"dsn": "postgresql://localhost/test"}, migration_config={"script_location": str(migration_dir)}
+        connection_config={"dsn": "postgresql://localhost/test"},
+        migration_config={"script_location": str(migration_dir)},
     )

     with patch.object(AsyncMigrationCommands, "fix", return_value=None) as mock_fix:
diff --git a/tests/unit/test_config_resolver.py b/tests/unit/test_config_resolver.py
index 1bb57d67b..e6099a5cb 100644
--- a/tests/unit/test_config_resolver.py
+++ b/tests/unit/test_config_resolver.py
@@ -151,12 +151,12 @@ def mixed_config_list() -> list[Any]:
     async def test_config_validation_attributes(self) -> None:
         """Test that config validation checks for required attributes."""

-        # Test config missing both database_url and pool_config
+        # Test config missing both database_url and connection_config
         class IncompleteConfig:
             def __init__(self) -> None:
                 self.bind_key = "test"
                 self.migration_config: dict[str, Any] = {}
-                # Missing both pool_config and database_url
+                # Missing both connection_config and database_url

         def incomplete_config() -> "IncompleteConfig":
             return IncompleteConfig()
diff --git a/tests/unit/test_extensions/test_fastapi/test_extension.py b/tests/unit/test_extensions/test_fastapi/test_extension.py
index b8ac1a4ca..7940fdb0a 100644
--- a/tests/unit/test_extensions/test_fastapi/test_extension.py
+++ b/tests/unit/test_extensions/test_fastapi/test_extension.py
@@ -16,7 +16,7 @@ def test_provide_session_method_exists() -> None:
     """Test that provide_session() method exists (not session_dependency())."""
     sqlspec = SQLSpec()
-    config = AiosqliteConfig(pool_config={"database": ":memory:"})
+    config = AiosqliteConfig(connection_config={"database": ":memory:"})
     sqlspec.add_config(config)

     plugin = SQLSpecPlugin(sqlspec)
@@ -32,7 +32,7 @@ def test_provide_connection_method_exists() -> None:
     """Test that provide_connection() method exists (not connection_dependency())."""
     sqlspec = SQLSpec()
-    config = AiosqliteConfig(pool_config={"database": ":memory:"})
+    config = AiosqliteConfig(connection_config={"database": ":memory:"})
     sqlspec.add_config(config)

     plugin = SQLSpecPlugin(sqlspec)
@@ -48,7 +48,7 @@ def test_uses_starlette_default_session_key() -> None:
     """FastAPI inherits from Starlette and should use same DEFAULT_SESSION_KEY."""
     sqlspec = SQLSpec()
-    config = AiosqliteConfig(pool_config={"database": ":memory:"})
+    config = AiosqliteConfig(connection_config={"database": ":memory:"})
     sqlspec.add_config(config)

     plugin = SQLSpecPlugin(sqlspec)
@@ -63,7 +63,7 @@ def test_respects_custom_session_key() -> None:
     custom_key = "custom_db"
     sqlspec = SQLSpec()
     config = AiosqliteConfig(
-        pool_config={"database": ":memory:"}, extension_config={"starlette": {"session_key": custom_key}}
+        connection_config={"database": ":memory:"}, extension_config={"starlette": {"session_key": custom_key}}
     )
     sqlspec.add_config(config)
@@ -77,7 +77,7 @@ def test_provide_session_works_in_route() -> None:
     """Test that provide_session() works correctly in FastAPI routes."""
     sqlspec = SQLSpec()
     config = AiosqliteConfig(
-        pool_config={"database": ":memory:"}, extension_config={"starlette": {"commit_mode": "autocommit"}}
+        connection_config={"database": ":memory:"}, extension_config={"starlette": {"commit_mode": "autocommit"}}
     )
     sqlspec.add_config(config)
diff --git a/tests/unit/test_extensions/test_flask/test_extension.py b/tests/unit/test_extensions/test_flask/test_extension.py
index 40abd6f44..dc5065733 100644
--- a/tests/unit/test_extensions/test_flask/test_extension.py
+++ b/tests/unit/test_extensions/test_flask/test_extension.py
@@ -19,7 +19,7 @@ def test_shutdown_closes_sync_pools(monkeypatch: pytest.MonkeyPatch) -> None:
     """Shutdown should dispose sync pools exactly once."""
     sqlspec = SQLSpec()
-    config = SqliteConfig(pool_config={"database": ":memory:"})
+    config = SqliteConfig(connection_config={"database": ":memory:"})
     sqlspec.add_config(config)

     app = Flask(__name__)
@@ -46,7 +46,7 @@ def test_shutdown_closes_async_pools_and_stops_portal(monkeypatch: pytest.Monkey
     with tempfile.NamedTemporaryFile(suffix=".db", delete=True) as tmp:
         sqlspec = SQLSpec()
-        config = AiosqliteConfig(pool_config={"database": tmp.name})
+        config = AiosqliteConfig(connection_config={"database": tmp.name})
         sqlspec.add_config(config)

         app = Flask(__name__)
@@ -76,7 +76,7 @@ def test_default_session_key_is_db_session() -> None:
 def test_uses_default_session_key_when_not_configured() -> None:
     """Plugin should use DEFAULT_SESSION_KEY when no extension_config provided."""
     sqlspec = SQLSpec()
-    config = SqliteConfig(pool_config={"database": ":memory:"})
+    config = SqliteConfig(connection_config={"database": ":memory:"})
     sqlspec.add_config(config)

     plugin = SQLSpecPlugin(sqlspec)
@@ -89,7 +89,9 @@ def test_respects_custom_session_key() -> None:
     """Plugin should respect custom session_key in extension_config."""
     custom_key = "custom_db"
     sqlspec = SQLSpec()
-    config = SqliteConfig(pool_config={"database": ":memory:"}, extension_config={"flask": {"session_key": custom_key}})
+    config = SqliteConfig(
+        connection_config={"database": ":memory:"}, extension_config={"flask": {"session_key": custom_key}}
+    )
     sqlspec.add_config(config)

     plugin = SQLSpecPlugin(sqlspec)
diff --git a/tests/unit/test_extensions/test_litestar/test_handlers.py b/tests/unit/test_extensions/test_litestar/test_handlers.py
index b6d7f1d67..dadaf4cfd 100644
--- a/tests/unit/test_extensions/test_litestar/test_handlers.py
+++ b/tests/unit/test_extensions/test_litestar/test_handlers.py
@@ -155,7 +155,7 @@ async def test_async_autocommit_handler_raises_on_conflicting_statuses() -> None
 async def test_async_lifespan_handler_creates_and_closes_pool() -> None:
     """Test async lifespan handler manages pool lifecycle."""
-    config = AiosqliteConfig(pool_config={"database": ":memory:"})
+    config = AiosqliteConfig(connection_config={"database": ":memory:"})
     pool_key = "test_pool"

     handler = lifespan_handler_maker(config, pool_key)
@@ -174,7 +174,7 @@ async def test_async_lifespan_handler_creates_and_closes_pool() -> None:
 async def test_async_pool_provider_returns_pool() -> None:
     """Test async pool provider returns pool from state."""
-    config = AiosqliteConfig(pool_config={"database": ":memory:"})
+    config = AiosqliteConfig(connection_config={"database": ":memory:"})
     pool_key = "test_pool"

     provider = pool_provider_maker(config, pool_key)
@@ -192,7 +192,7 @@ async def test_async_pool_provider_returns_pool() -> None:
 async def test_async_pool_provider_raises_when_pool_missing() -> None:
     """Test async pool provider raises error when pool not in state."""
-    config = AiosqliteConfig(pool_config={"database": ":memory:"})
+    config = AiosqliteConfig(connection_config={"database": ":memory:"})
     pool_key = "test_pool"

     provider = pool_provider_maker(config, pool_key)
@@ -210,7 +210,7
@@ async def test_async_pool_provider_raises_when_pool_missing() -> None: async def test_async_connection_provider_creates_connection() -> None: """Test async connection provider creates connection from pool.""" - config = AiosqliteConfig(pool_config={"database": ":memory:"}) + config = AiosqliteConfig(connection_config={"database": ":memory:"}) pool_key = "test_pool" connection_key = "test_connection" @@ -229,7 +229,7 @@ async def test_async_connection_provider_creates_connection() -> None: async def test_async_connection_provider_raises_when_pool_missing() -> None: """Test async connection provider raises error when pool missing.""" - config = AiosqliteConfig(pool_config={"database": ":memory:"}) + config = AiosqliteConfig(connection_config={"database": ":memory:"}) pool_key = "test_pool" connection_key = "test_connection" @@ -248,7 +248,7 @@ async def test_async_connection_provider_raises_when_pool_missing() -> None: async def test_sync_connection_provider_supports_context_manager() -> None: """Test sync connection provider wraps sync context managers.""" - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) pool_key = "test_pool" connection_key = "test_connection" @@ -271,7 +271,7 @@ async def test_sync_connection_provider_supports_context_manager() -> None: async def test_async_session_provider_creates_session() -> None: """Test async session provider creates driver session.""" - config = AiosqliteConfig(pool_config={"database": ":memory:"}) + config = AiosqliteConfig(connection_config={"database": ":memory:"}) connection_key = "test_connection" provider = session_provider_maker(config, connection_key) diff --git a/tests/unit/test_extensions/test_starlette/test_extension.py b/tests/unit/test_extensions/test_starlette/test_extension.py index 34f24260c..5a110a2c4 100644 --- a/tests/unit/test_extensions/test_starlette/test_extension.py +++ b/tests/unit/test_extensions/test_starlette/test_extension.py @@ -6,6 +6,7 @@ from starlette.applications import Starlette from starlette.responses import JSONResponse +from starlette.routing import Route from starlette.testclient import TestClient from sqlspec import SQLSpec @@ -22,7 +23,7 @@ def test_default_session_key_is_db_session() -> None: def test_uses_default_session_key_when_not_configured() -> None: """Plugin should use DEFAULT_SESSION_KEY when no extension_config provided.""" sqlspec = SQLSpec() - config = AiosqliteConfig(pool_config={"database": ":memory:"}) + config = AiosqliteConfig(connection_config={"database": ":memory:"}) sqlspec.add_config(config) plugin = SQLSpecPlugin(sqlspec) @@ -36,7 +37,7 @@ def test_respects_custom_session_key() -> None: custom_key = "custom_db" sqlspec = SQLSpec() config = AiosqliteConfig( - pool_config={"database": ":memory:"}, extension_config={"starlette": {"session_key": custom_key}} + connection_config={"database": ":memory:"}, extension_config={"starlette": {"session_key": custom_key}} ) sqlspec.add_config(config) @@ -50,19 +51,22 @@ def test_get_session_works_in_route() -> None: """Test that get_session() works correctly in Starlette routes.""" sqlspec = SQLSpec() config = AiosqliteConfig( - pool_config={"database": ":memory:"}, extension_config={"starlette": {"commit_mode": "autocommit"}} + connection_config={"database": ":memory:"}, extension_config={"starlette": {"commit_mode": "autocommit"}} ) sqlspec.add_config(config) - app = Starlette() - plugin = SQLSpecPlugin(sqlspec, app) + plugin_ref: SQLSpecPlugin | None = None - 
@app.route("/test") - async def test_route(request): - db = plugin.get_session(request) + async def test_route(request): # type: ignore[no-untyped-def] + assert plugin_ref is not None + db = plugin_ref.get_session(request) result = await db.execute("SELECT 1 as value") return JSONResponse({"value": result.scalar()}) + routes = [Route("/test", test_route)] + app = Starlette(routes=routes) + plugin_ref = SQLSpecPlugin(sqlspec, app) + with TestClient(app) as client: response = client.get("/test") assert response.status_code == 200 diff --git a/tests/unit/test_migrations/test_extension_discovery.py b/tests/unit/test_migrations/test_extension_discovery.py index c9afa8d83..f136931c9 100644 --- a/tests/unit/test_migrations/test_extension_discovery.py +++ b/tests/unit/test_migrations/test_extension_discovery.py @@ -9,7 +9,7 @@ def test_extension_migration_discovery(tmp_path: Path) -> None: """Test that extension migrations are discovered when configured.""" config = SqliteConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={ "script_location": str(tmp_path), "version_table_name": "test_migrations", @@ -31,7 +31,7 @@ def test_extension_migration_discovery(tmp_path: Path) -> None: def test_extension_migration_context(tmp_path: Path) -> None: """Test that migration context is created with dialect information.""" config = SqliteConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={"script_location": str(tmp_path), "include_extensions": ["litestar"]}, ) @@ -44,7 +44,9 @@ def test_extension_migration_context(tmp_path: Path) -> None: def test_no_extensions_by_default(tmp_path: Path) -> None: """Test that no extension migrations are included by default.""" - config = SqliteConfig(pool_config={"database": ":memory:"}, migration_config={"script_location": str(tmp_path)}) + config = SqliteConfig( + connection_config={"database": ":memory:"}, migration_config={"script_location": str(tmp_path)} + ) commands = SyncMigrationCommands(config) @@ -65,7 +67,7 @@ def test_migration_file_discovery_with_extensions(tmp_path: Path) -> None: """) config = SqliteConfig( - pool_config={"database": ":memory:"}, + connection_config={"database": ":memory:"}, migration_config={"script_location": str(migrations_dir), "include_extensions": ["litestar"]}, ) diff --git a/tests/unit/test_migrations/test_migration_commands.py b/tests/unit/test_migrations/test_migration_commands.py index 1d8051fdc..e7cffbda5 100644 --- a/tests/unit/test_migrations/test_migration_commands.py +++ b/tests/unit/test_migrations/test_migration_commands.py @@ -25,13 +25,13 @@ @pytest.fixture def sync_config() -> SqliteConfig: """Create a sync database config for testing.""" - return SqliteConfig(pool_config={"database": ":memory:"}) + return SqliteConfig(connection_config={"database": ":memory:"}) @pytest.fixture def async_config() -> AiosqliteConfig: """Create an async database config for testing.""" - return AiosqliteConfig(pool_config={"database": ":memory:"}) + return AiosqliteConfig(connection_config={"database": ":memory:"}) def test_migration_commands_sync_config_initialization(sync_config: SqliteConfig) -> None: diff --git a/tests/unit/test_migrations/test_migration_context.py b/tests/unit/test_migrations/test_migration_context.py index 58aceb7eb..72ec5b704 100644 --- a/tests/unit/test_migrations/test_migration_context.py +++ b/tests/unit/test_migrations/test_migration_context.py @@ -7,7 +7,7 @@ def 
test_migration_context_from_sqlite_config() -> None: """Test creating migration context from SQLite config.""" - config = SqliteConfig(pool_config={"database": ":memory:"}) + config = SqliteConfig(connection_config={"database": ":memory:"}) context = MigrationContext.from_config(config) assert context.dialect == "sqlite" @@ -18,7 +18,9 @@ def test_migration_context_from_sqlite_config() -> None: def test_migration_context_from_postgres_config() -> None: """Test creating migration context from PostgreSQL config.""" - config = PsycopgSyncConfig(pool_config={"host": "localhost", "dbname": "test", "user": "test", "password": "test"}) + config = PsycopgSyncConfig( + connection_config={"host": "localhost", "dbname": "test", "user": "test", "password": "test"} + ) context = MigrationContext.from_config(config) # PostgreSQL config should have postgres dialect diff --git a/tests/unit/test_observability.py b/tests/unit/test_observability.py index fadde02cc..25cb44dc8 100644 --- a/tests/unit/test_observability.py +++ b/tests/unit/test_observability.py @@ -412,7 +412,7 @@ def test_telemetry_snapshot_includes_recent_storage_jobs() -> None: reset_storage_bridge_events() spec = SQLSpec() - spec.add_config(SqliteConfig(pool_config={"database": ":memory:"})) + spec.add_config(SqliteConfig(connection_config={"database": ":memory:"})) record_storage_diagnostic_event({ "destination": "alias://bucket/path",