diff --git a/pyproject.toml b/pyproject.toml index 0a2598089..bcb6d2b02 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -51,6 +51,7 @@ psycopg = ["psycopg[binary,pool]"] pydantic = ["pydantic", "pydantic-extra-types"] pymssql = ["pymssql"] pymysql = ["pymysql"] +sanic = ["sanic", "sanic[ext]>=24.6.0"] spanner = ["google-cloud-spanner"] uuid = ["uuid-utils"] @@ -90,6 +91,10 @@ extras = [ "adbc_driver_postgresql", "adbc_driver_flightsql", "adbc_driver_bigquery", + "sanic-testing", + "dishka ; python_version >= \"3.10\"", + "pydantic-extra-types", + "fsspec[s3]", ] lint = [ "mypy>=1.13.0", @@ -466,6 +471,7 @@ split-on-trailing-comma = false "docs/**/*.*" = ["S", "B", "DTZ", "A", "TC", "ERA", "D", "RET", "PLW0127"] "docs/examples/**" = ["T201"] "sqlspec/builder/mixins/**/*.*" = ["SLF001"] +"sqlspec/extensions/fastapi/providers.py" = ["B008"] "tests/**/*.*" = [ "A", "ARG", diff --git a/sqlspec/extensions/fastapi/__init__.py b/sqlspec/extensions/fastapi/__init__.py new file mode 100644 index 000000000..8657dca67 --- /dev/null +++ b/sqlspec/extensions/fastapi/__init__.py @@ -0,0 +1,13 @@ +from sqlspec.extensions.fastapi._middleware import SessionMiddleware +from sqlspec.extensions.fastapi.config import DatabaseConfig +from sqlspec.extensions.fastapi.extension import SQLSpec +from sqlspec.extensions.fastapi.providers import FilterConfig, create_filter_dependencies, provide_filters + +__all__ = ( + "DatabaseConfig", + "FilterConfig", + "SQLSpec", + "SessionMiddleware", + "create_filter_dependencies", + "provide_filters", +) diff --git a/sqlspec/extensions/fastapi/_middleware.py b/sqlspec/extensions/fastapi/_middleware.py new file mode 100644 index 000000000..4fa52f73c --- /dev/null +++ b/sqlspec/extensions/fastapi/_middleware.py @@ -0,0 +1,117 @@ +"""Middleware for SQLSpec FastAPI integration.""" + +import contextlib +from typing import TYPE_CHECKING, Any, Optional + +from starlette.middleware.base import BaseHTTPMiddleware + +from sqlspec.utils.sync_tools import ensure_async_ + +if TYPE_CHECKING: + from collections.abc import Awaitable, Callable + + from starlette.requests import Request + from starlette.responses import Response + + from sqlspec.extensions.fastapi.config import CommitMode, DatabaseConfig + + +__all__ = ("SessionMiddleware",) + + +class SessionMiddleware(BaseHTTPMiddleware): + """Middleware for managing database sessions and transactions in FastAPI.""" + + def __init__( + self, + app: Any, + config: "DatabaseConfig", + commit_mode: "CommitMode" = "manual", + extra_commit_statuses: "Optional[set[int]]" = None, + extra_rollback_statuses: "Optional[set[int]]" = None, + ) -> None: + """Initialize session middleware. + + Args: + app: The ASGI application. + config: Database configuration instance. + commit_mode: Transaction commit behavior. + extra_commit_statuses: Additional status codes that trigger commits. + extra_rollback_statuses: Additional status codes that trigger rollbacks. + """ + super().__init__(app) + self.config = config + self.commit_mode = commit_mode + self.extra_commit_statuses = extra_commit_statuses or set() + self.extra_rollback_statuses = extra_rollback_statuses or set() + + async def dispatch(self, request: "Request", call_next: "Callable[[Request], Awaitable[Response]]") -> "Response": + """Handle request with session management. + + Args: + request: The incoming request. + call_next: The next middleware or endpoint. + + Returns: + The response from the application. + """ + if not self.config.connection_provider: + return await call_next(request) + + # Get connection from provider + connection_gen = self.config.connection_provider() + connection = await connection_gen.__anext__() + + # Store connection in request state + request.state.__dict__[self.config.connection_key] = connection + + try: + response = await call_next(request) + + # Handle transaction based on commit mode and response status + if self.commit_mode != "manual": + await self._handle_transaction(connection, response.status_code) + + except Exception: + # Rollback on exception + if hasattr(connection, "rollback") and callable(connection.rollback): + await ensure_async_(connection.rollback)() + raise + else: + return response + finally: + # Clean up connection + with contextlib.suppress(StopAsyncIteration): + await connection_gen.__anext__() + if hasattr(connection, "close") and callable(connection.close): + await ensure_async_(connection.close)() + + async def _handle_transaction(self, connection: Any, status_code: int) -> None: + """Handle transaction commit/rollback based on status code. + + Args: + connection: The database connection. + status_code: HTTP response status code. + """ + http_ok = 200 + http_multiple_choices = 300 + http_bad_request = 400 + + should_commit = False + + if self.commit_mode == "autocommit": + should_commit = http_ok <= status_code < http_multiple_choices + elif self.commit_mode == "autocommit_include_redirect": + should_commit = http_ok <= status_code < http_bad_request + + # Apply extra status overrides + if status_code in self.extra_commit_statuses: + should_commit = True + elif status_code in self.extra_rollback_statuses: + should_commit = False + + # Execute transaction action + if should_commit and hasattr(connection, "commit") and callable(connection.commit): + await ensure_async_(connection.commit)() + elif not should_commit and hasattr(connection, "rollback") and callable(connection.rollback): + await ensure_async_(connection.rollback)() diff --git a/sqlspec/extensions/fastapi/_providers.py b/sqlspec/extensions/fastapi/_providers.py new file mode 100644 index 000000000..3691f2f21 --- /dev/null +++ b/sqlspec/extensions/fastapi/_providers.py @@ -0,0 +1,106 @@ +"""Provider functions for SQLSpec FastAPI integration.""" + +import contextlib +from typing import TYPE_CHECKING, Any, cast + +from sqlspec.utils.sync_tools import ensure_async_ + +if TYPE_CHECKING: + from collections.abc import AsyncGenerator, Awaitable, Callable + + from sqlspec.config import DatabaseConfigProtocol, DriverT + from sqlspec.typing import ConnectionT, PoolT + + +__all__ = ("create_connection_provider", "create_pool_provider", "create_session_provider") + + +def create_pool_provider( + config: "DatabaseConfigProtocol[Any, Any, Any]", pool_key: str +) -> "Callable[[], Awaitable[PoolT]]": + """Create provider for database pool access. + + Args: + config: The database configuration object. + pool_key: The key used to store the connection pool. + + Returns: + The pool provider function. + """ + + async def provide_pool() -> "PoolT": + """Provide the database pool. + + Returns: + The database connection pool. + """ + db_pool = await ensure_async_(config.create_pool)() + return cast("PoolT", db_pool) + + return provide_pool + + +def create_connection_provider( + config: "DatabaseConfigProtocol[Any, Any, Any]", pool_key: str, connection_key: str +) -> "Callable[[], AsyncGenerator[ConnectionT, None]]": + """Create provider for database connections. + + Args: + config: The database configuration object. + pool_key: The key used to store the connection pool. + connection_key: The key used to store the connection. + + Returns: + The connection provider function. + """ + + async def provide_connection() -> "AsyncGenerator[ConnectionT, None]": + """Provide a database connection. + + Yields: + Database connection instance. + """ + db_pool = await ensure_async_(config.create_pool)() + + try: + connection_cm = config.provide_connection(db_pool) + + # Handle both context managers and direct connections + if hasattr(connection_cm, "__aenter__"): + async with connection_cm as conn: + yield cast("ConnectionT", conn) + else: + conn = await connection_cm if hasattr(connection_cm, "__await__") else connection_cm + yield cast("ConnectionT", conn) + finally: + with contextlib.suppress(Exception): + await ensure_async_(config.close_pool)() + + return provide_connection + + +def create_session_provider( + config: "DatabaseConfigProtocol[Any, Any, Any]", connection_key: str +) -> "Callable[[ConnectionT], AsyncGenerator[DriverT, None]]": + """Create provider for database sessions/drivers. + + Args: + config: The database configuration object. + connection_key: The key used to access the connection. + + Returns: + The session provider function. + """ + + async def provide_session(connection: "ConnectionT") -> "AsyncGenerator[DriverT, None]": + """Provide a database session/driver. + + Args: + connection: The database connection. + + Yields: + Database driver/session instance. + """ + yield cast("DriverT", config.driver_type(connection=connection)) + + return provide_session diff --git a/sqlspec/extensions/fastapi/cli.py b/sqlspec/extensions/fastapi/cli.py new file mode 100644 index 000000000..8b29b8c54 --- /dev/null +++ b/sqlspec/extensions/fastapi/cli.py @@ -0,0 +1,62 @@ +"""FastAPI CLI integration for SQLSpec migrations.""" + +from contextlib import suppress +from typing import TYPE_CHECKING, cast + +from sqlspec.cli import add_migration_commands + +try: + import rich_click as click +except ImportError: + import click # type: ignore[no-redef] + +if TYPE_CHECKING: + from fastapi import FastAPI + + from sqlspec.extensions.fastapi.extension import SQLSpec + +__all__ = ("get_database_migration_plugin", "register_database_commands") + + +def get_database_migration_plugin(app: "FastAPI") -> "SQLSpec": + """Retrieve the SQLSpec plugin from the FastAPI application. + + Args: + app: The FastAPI application + + Returns: + The SQLSpec plugin + + Raises: + ImproperConfigurationError: If the SQLSpec plugin is not found + """ + from sqlspec.exceptions import ImproperConfigurationError + + # FastAPI doesn't have a built-in plugin system like Litestar + # Check if SQLSpec was stored in app.state + with suppress(AttributeError): + if hasattr(app.state, "sqlspec"): + return cast("SQLSpec", app.state.sqlspec) + + msg = "Failed to initialize database migrations. The required SQLSpec plugin is missing." + raise ImproperConfigurationError(msg) + + +def register_database_commands(app: "FastAPI") -> click.Group: + """Register database commands with a FastAPI application. + + Args: + app: The FastAPI application instance + + Returns: + Click group with database commands + """ + + @click.group(name="db") + def database_group() -> None: + """Manage SQLSpec database components.""" + + # Add migration commands to the group + add_migration_commands(database_group) + + return database_group diff --git a/sqlspec/extensions/fastapi/config.py b/sqlspec/extensions/fastapi/config.py new file mode 100644 index 000000000..3c9b3acb0 --- /dev/null +++ b/sqlspec/extensions/fastapi/config.py @@ -0,0 +1,95 @@ +"""Configuration classes for SQLSpec FastAPI integration.""" + +from typing import TYPE_CHECKING + +from sqlspec.extensions.starlette.config import ( + DEFAULT_COMMIT_MODE, + DEFAULT_CONNECTION_KEY, + DEFAULT_POOL_KEY, + DEFAULT_SESSION_KEY, +) +from sqlspec.extensions.starlette.config import AsyncDatabaseConfig as StarletteAsyncConfig +from sqlspec.extensions.starlette.config import DatabaseConfig as StarletteConfig +from sqlspec.extensions.starlette.config import SyncDatabaseConfig as StarletteSyncConfig + +if TYPE_CHECKING: + from fastapi import FastAPI + +__all__ = ( + "DEFAULT_COMMIT_MODE", + "DEFAULT_CONNECTION_KEY", + "DEFAULT_POOL_KEY", + "DEFAULT_SESSION_KEY", + "AsyncDatabaseConfig", + "CommitMode", + "DatabaseConfig", + "SyncDatabaseConfig", +) + +# Re-export Starlette types with FastAPI-compatible typing +from sqlspec.extensions.starlette.config import CommitMode + + +class DatabaseConfig(StarletteConfig): + """Configuration for SQLSpec database integration with FastAPI applications. + + FastAPI is built on Starlette, so this configuration inherits all functionality + from the Starlette configuration. The only differences are type hints for FastAPI + Request objects and middleware imports. + """ + + def init_app(self, app: "FastAPI") -> None: # pyright: ignore + """Initialize SQLSpec configuration for FastAPI application. + + Args: + app: The FastAPI application instance. + """ + from sqlspec.extensions.fastapi._middleware import SessionMiddleware + from sqlspec.extensions.starlette._providers import ( + create_connection_provider, + create_pool_provider, + create_session_provider, + ) + + # Create providers using Starlette providers (FastAPI is compatible) + self.pool_provider = create_pool_provider(self.config, self.pool_key) + self.connection_provider = create_connection_provider(self.config, self.pool_key, self.connection_key) + self.session_provider = create_session_provider(self.config, self.connection_key) + + # Add middleware if enabled + if self.enable_middleware: + app.add_middleware( + SessionMiddleware, + config=self, + commit_mode=self.commit_mode, + extra_commit_statuses=self.extra_commit_statuses, + extra_rollback_statuses=self.extra_rollback_statuses, + ) + + # Add event handlers - delegate to parent logic but cast FastAPI to Starlette + super().init_app(app) # type: ignore[arg-type] + + +# Add typed subclasses for better developer experience +class SyncDatabaseConfig(StarletteSyncConfig): + """Sync-specific DatabaseConfig with FastAPI-compatible type hints.""" + + def init_app(self, app: "FastAPI") -> None: # pyright: ignore + """Initialize SQLSpec configuration for FastAPI application. + + Args: + app: The FastAPI application instance. + """ + DatabaseConfig.init_app(self, app) # pyright: ignore + + +class AsyncDatabaseConfig(StarletteAsyncConfig): + """Async-specific DatabaseConfig with FastAPI-compatible type hints.""" + + def init_app(self, app: "FastAPI") -> None: # pyright: ignore + """Initialize SQLSpec configuration for FastAPI application. + + Args: + app: The FastAPI application instance. + """ + DatabaseConfig.init_app(self, app) # pyright: ignore diff --git a/sqlspec/extensions/fastapi/extension.py b/sqlspec/extensions/fastapi/extension.py new file mode 100644 index 000000000..7bbc5374c --- /dev/null +++ b/sqlspec/extensions/fastapi/extension.py @@ -0,0 +1,28 @@ +"""SQLSpec extension for FastAPI applications.""" + +from typing import TYPE_CHECKING + +from sqlspec.extensions.starlette.extension import SQLSpec as StarletteExtension + +if TYPE_CHECKING: + from fastapi import FastAPI + +__all__ = ("SQLSpec",) + + +class SQLSpec(StarletteExtension): + """SQLSpec integration for FastAPI applications. + + FastAPI is built on Starlette, so this extension inherits all functionality + from the Starlette extension. The only difference is the type hints for + the init_app method to accept FastAPI apps specifically. + """ + + def init_app(self, app: "FastAPI") -> None: # pyright: ignore + """Initialize SQLSpec with FastAPI application. + + Args: + app: The FastAPI application instance. + """ + # FastAPI apps are compatible with Starlette, so delegate to parent + super().init_app(app) # type: ignore[arg-type] diff --git a/sqlspec/extensions/fastapi/providers.py b/sqlspec/extensions/fastapi/providers.py new file mode 100644 index 000000000..b0ef7f7ff --- /dev/null +++ b/sqlspec/extensions/fastapi/providers.py @@ -0,0 +1,470 @@ +"""FastAPI dependency providers for SQLSpec filters and services.""" + +import datetime +import inspect +from collections.abc import Callable +from typing import Any, Literal, NamedTuple, Optional, TypedDict, Union, cast +from uuid import UUID + +from fastapi import Depends, Query +from typing_extensions import NotRequired + +from sqlspec.core.filters import ( + BeforeAfterFilter, + FilterTypes, + InCollectionFilter, + LimitOffsetFilter, + NotInCollectionFilter, + OrderByFilter, + SearchFilter, +) +from sqlspec.utils.singleton import SingletonMeta +from sqlspec.utils.text import camelize + +# Query objects to avoid B008 warnings +IDS_QUERY = Query(alias="ids", default=None) +CREATED_BEFORE_QUERY = Query(alias="createdBefore", default=None) +CREATED_AFTER_QUERY = Query(alias="createdAfter", default=None) +UPDATED_BEFORE_QUERY = Query(alias="updatedBefore", default=None) +UPDATED_AFTER_QUERY = Query(alias="updatedAfter", default=None) +SEARCH_STRING_QUERY = Query(alias="searchString", default=None) +SEARCH_IGNORE_CASE_QUERY = Query(alias="searchIgnoreCase", default=False) +CURRENT_PAGE_QUERY = Query(alias="currentPage", ge=1, default=1) +PAGE_SIZE_QUERY = Query(alias="pageSize", ge=1, default=20) + +__all__ = ( + "DEPENDENCY_DEFAULTS", + "BooleanOrNone", + "DTorNone", + "DependencyDefaults", + "FieldNameType", + "FilterConfig", + "HashableType", + "HashableValue", + "IntOrNone", + "SortOrder", + "SortOrderOrNone", + "StringOrNone", + "UuidOrNone", + "create_filter_dependencies", + "dep_cache", + "provide_filters", +) + +DTorNone = Optional[datetime.datetime] +StringOrNone = Optional[str] +UuidOrNone = Optional[UUID] +IntOrNone = Optional[int] +BooleanOrNone = Optional[bool] +SortOrder = Literal["asc", "desc"] +SortOrderOrNone = Optional[SortOrder] +HashableValue = Union[str, int, float, bool, None] +HashableType = Union[HashableValue, tuple[Any, ...], tuple[tuple[str, Any], ...], tuple[HashableValue, ...]] + + +class DependencyDefaults: + FILTERS_DEPENDENCY_KEY: str = "filters" + CREATED_FILTER_DEPENDENCY_KEY: str = "created_filter" + ID_FILTER_DEPENDENCY_KEY: str = "id_filter" + LIMIT_OFFSET_FILTER_DEPENDENCY_KEY: str = "limit_offset_filter" + UPDATED_FILTER_DEPENDENCY_KEY: str = "updated_filter" + ORDER_BY_FILTER_DEPENDENCY_KEY: str = "order_by_filter" + SEARCH_FILTER_DEPENDENCY_KEY: str = "search_filter" + DEFAULT_PAGINATION_SIZE: int = 20 + + +DEPENDENCY_DEFAULTS = DependencyDefaults() + + +class FieldNameType(NamedTuple): + """Type for field name and associated type information for filter configuration.""" + + name: str + type_hint: type[Any] = str + + +class FilterConfig(TypedDict): + """Configuration for generating dynamic filters.""" + + id_filter: NotRequired[type[Union[UUID, int, str]]] + id_field: NotRequired[str] + sort_field: NotRequired[str] + sort_order: NotRequired[SortOrder] + pagination_type: NotRequired[Literal["limit_offset"]] + pagination_size: NotRequired[int] + search: NotRequired[Union[str, set[str], list[str]]] + search_ignore_case: NotRequired[bool] + created_at: NotRequired[bool] + updated_at: NotRequired[bool] + not_in_fields: NotRequired[Union[FieldNameType, set[FieldNameType], list[Union[str, FieldNameType]]]] + in_fields: NotRequired[Union[FieldNameType, set[FieldNameType], list[Union[str, FieldNameType]]]] + + +class DependencyCache(metaclass=SingletonMeta): + """Dependency cache for memoizing dynamically generated dependencies.""" + + def __init__(self) -> None: + self.dependencies: dict[Union[int, str], dict[str, Callable[..., Any]]] = {} + + def add_dependencies(self, key: Union[int, str], dependencies: dict[str, Callable[..., Any]]) -> None: + self.dependencies[key] = dependencies + + def get_dependencies(self, key: Union[int, str]) -> Optional[dict[str, Callable[..., Any]]]: + return self.dependencies.get(key) + + +dep_cache = DependencyCache() + + +def provide_filters(config: FilterConfig) -> Callable[..., list[FilterTypes]]: + """Create a FastAPI dependency for the combined filter function. + + Args: + config: FilterConfig instance with desired settings. + + Returns: + A FastAPI dependency that returns a list of filters based on query parameters. + + Example: + >>> filter_config = FilterConfig( + ... id_filter=int, + ... search="name,email", + ... pagination_type="limit_offset", + ... sort_field="created_at", + ... ) + >>> + >>> # Use in route handler + >>> @app.get("/users") + >>> async def get_users( + ... filters: list[FilterTypes] = Depends( + ... provide_filters(filter_config) + ... ), + ... ): + ... # filters will contain parsed query parameters + ... return await user_service.get_filtered(filters) + """ + filter_deps = create_filter_dependencies(config) + return filter_deps.get(DEPENDENCY_DEFAULTS.FILTERS_DEPENDENCY_KEY, list) + + +def create_filter_dependencies( + config: FilterConfig, dep_defaults: DependencyDefaults = DEPENDENCY_DEFAULTS +) -> dict[str, Callable[..., Any]]: + """Create FastAPI dependency providers for the combined filter function. + + Args: + config: FilterConfig instance with desired settings. + dep_defaults: Dependency defaults to use for the filter dependencies + + Returns: + A dictionary of dependency provider functions. + """ + if (deps := dep_cache.get_dependencies(cache_key := hash(_make_hashable(config)))) is not None: + return deps + deps = _create_statement_filters(config, dep_defaults) + dep_cache.add_dependencies(cache_key, deps) + return deps + + +def _make_hashable(value: Any) -> HashableType: + """Convert a value into a hashable type for caching purposes. + + Args: + value: Any value that needs to be made hashable. + + Returns: + A hashable version of the value. + """ + if isinstance(value, dict): + items = [] + for k in sorted(value.keys()): # pyright: ignore + v = value[k] + items.append((str(k), _make_hashable(v))) + return tuple(items) + if isinstance(value, (list, set)): + hashable_items = [_make_hashable(item) for item in value] + filtered_items = [item for item in hashable_items if item is not None] + return tuple(sorted(filtered_items, key=str)) + if isinstance(value, (str, int, float, bool, type(None))): + return value + return str(value) + + +def _create_statement_filters( + config: FilterConfig, dep_defaults: DependencyDefaults = DEPENDENCY_DEFAULTS +) -> dict[str, Callable[..., Any]]: + """Create filter dependencies based on configuration. + + Args: + config: Configuration dictionary specifying which filters to enable + dep_defaults: Dependency defaults to use for the filter dependencies + + Returns: + Dictionary of filter provider functions + """ + filters: dict[str, Callable[..., Any]] = {} + + if config.get("id_filter", False): + + def provide_id_filter(ids: Optional[list[str]] = IDS_QUERY) -> InCollectionFilter: # pyright: ignore[reportMissingTypeArgument] + return InCollectionFilter(field_name=config.get("id_field", "id"), values=ids) + + filters[dep_defaults.ID_FILTER_DEPENDENCY_KEY] = provide_id_filter + + if config.get("created_at", False): + + def provide_created_filter( + before: DTorNone = CREATED_BEFORE_QUERY, after: DTorNone = CREATED_AFTER_QUERY + ) -> BeforeAfterFilter: + return BeforeAfterFilter("created_at", before, after) + + filters[dep_defaults.CREATED_FILTER_DEPENDENCY_KEY] = provide_created_filter + + if config.get("updated_at", False): + + def provide_updated_filter( + before: DTorNone = UPDATED_BEFORE_QUERY, after: DTorNone = UPDATED_AFTER_QUERY + ) -> BeforeAfterFilter: + return BeforeAfterFilter("updated_at", before, after) + + filters[dep_defaults.UPDATED_FILTER_DEPENDENCY_KEY] = provide_updated_filter + + if config.get("pagination_type") == "limit_offset": + page_size_query = Query( + alias="pageSize", ge=1, default=config.get("pagination_size", dep_defaults.DEFAULT_PAGINATION_SIZE) + ) + + def provide_limit_offset_pagination( + current_page: int = CURRENT_PAGE_QUERY, page_size: int = page_size_query + ) -> LimitOffsetFilter: + return LimitOffsetFilter(page_size, page_size * (current_page - 1)) + + filters[dep_defaults.LIMIT_OFFSET_FILTER_DEPENDENCY_KEY] = provide_limit_offset_pagination + + if search_fields := config.get("search"): + search_ignore_case_query = Query(alias="searchIgnoreCase", default=config.get("search_ignore_case", False)) + + def provide_search_filter( + search_string: StringOrNone = SEARCH_STRING_QUERY, ignore_case: BooleanOrNone = search_ignore_case_query + ) -> SearchFilter: + field_names = set(search_fields.split(",")) if isinstance(search_fields, str) else set(search_fields) + + return SearchFilter( + field_name=field_names, + value=search_string, # type: ignore[arg-type] + ignore_case=ignore_case or False, + ) + + filters[dep_defaults.SEARCH_FILTER_DEPENDENCY_KEY] = provide_search_filter + + if sort_field := config.get("sort_field"): + + def provide_order_by( + field_name: StringOrNone = Query(alias="orderBy", default=sort_field), + sort_order: SortOrderOrNone = Query(alias="sortOrder", default=config.get("sort_order", "desc")), + ) -> OrderByFilter: + return OrderByFilter(field_name=field_name, sort_order=sort_order) # type: ignore[arg-type] + + filters[dep_defaults.ORDER_BY_FILTER_DEPENDENCY_KEY] = provide_order_by + + if not_in_fields := config.get("not_in_fields"): + not_in_fields = {not_in_fields} if isinstance(not_in_fields, (str, FieldNameType)) else not_in_fields + + for field_def in not_in_fields: + field_def = FieldNameType(name=field_def, type_hint=str) if isinstance(field_def, str) else field_def + + def create_not_in_filter_provider( # pyright: ignore + field_name: FieldNameType, + ) -> Callable[..., Optional[NotInCollectionFilter[field_def.type_hint]]]: # type: ignore + def provide_not_in_filter( # pyright: ignore + values: Optional[list[field_name.type_hint]] = Query( # type: ignore + alias=camelize(f"{field_name.name}_not_in"), default=None + ), + ) -> Optional[NotInCollectionFilter[field_name.type_hint]]: # type: ignore + return ( + NotInCollectionFilter[field_name.type_hint](field_name=field_name.name, values=values) # type: ignore + if values + else None + ) + + return provide_not_in_filter # pyright: ignore + + provider = create_not_in_filter_provider(field_def) # pyright: ignore + filters[f"{field_def.name}_not_in_filter"] = provider # pyright: ignore + + if in_fields := config.get("in_fields"): + in_fields = {in_fields} if isinstance(in_fields, (str, FieldNameType)) else in_fields + + for field_def in in_fields: + field_def = FieldNameType(name=field_def, type_hint=str) if isinstance(field_def, str) else field_def + + def create_in_filter_provider( # pyright: ignore + field_name: FieldNameType, + ) -> Callable[..., Optional[InCollectionFilter[field_def.type_hint]]]: # type: ignore + def provide_in_filter( # pyright: ignore + values: Optional[list[field_name.type_hint]] = Query( # type: ignore + alias=camelize(f"{field_name.name}_in"), default=None + ), + ) -> Optional[InCollectionFilter[field_name.type_hint]]: # type: ignore + return ( + InCollectionFilter[field_name.type_hint](field_name=field_name.name, values=values) # type: ignore + if values + else None + ) + + return provide_in_filter # pyright: ignore + + provider = create_in_filter_provider(field_def) # type: ignore + filters[f"{field_def.name}_in_filter"] = provider # type: ignore + + if filters: + filters[dep_defaults.FILTERS_DEPENDENCY_KEY] = _create_filter_aggregate_function(config) + + return filters + + +def _create_filter_aggregate_function(config: FilterConfig) -> Callable[..., list[FilterTypes]]: + """Create filter aggregation function based on configuration. + + Args: + config: The filter configuration. + + Returns: + Function that returns list of configured filters. + """ + + parameters: dict[str, inspect.Parameter] = {} + annotations: dict[str, Any] = {} + + if cls := config.get("id_filter"): + parameters["id_filter"] = inspect.Parameter( + name="id_filter", + kind=inspect.Parameter.POSITIONAL_OR_KEYWORD, + default=Depends(), + annotation=InCollectionFilter[cls], # type: ignore[valid-type] + ) + annotations["id_filter"] = InCollectionFilter[cls] # type: ignore[valid-type] + + if config.get("created_at"): + parameters["created_filter"] = inspect.Parameter( + name="created_filter", + kind=inspect.Parameter.POSITIONAL_OR_KEYWORD, + default=Depends(), + annotation=BeforeAfterFilter, + ) + annotations["created_filter"] = BeforeAfterFilter + + if config.get("updated_at"): + parameters["updated_filter"] = inspect.Parameter( + name="updated_filter", + kind=inspect.Parameter.POSITIONAL_OR_KEYWORD, + default=Depends(), + annotation=BeforeAfterFilter, + ) + annotations["updated_filter"] = BeforeAfterFilter + + if config.get("search"): + parameters["search_filter"] = inspect.Parameter( + name="search_filter", + kind=inspect.Parameter.POSITIONAL_OR_KEYWORD, + default=Depends(), + annotation=SearchFilter, + ) + annotations["search_filter"] = SearchFilter + + if config.get("pagination_type") == "limit_offset": + parameters["limit_offset_filter"] = inspect.Parameter( + name="limit_offset_filter", + kind=inspect.Parameter.POSITIONAL_OR_KEYWORD, + default=Depends(), + annotation=LimitOffsetFilter, + ) + annotations["limit_offset_filter"] = LimitOffsetFilter + + if config.get("sort_field"): + parameters["order_by_filter"] = inspect.Parameter( + name="order_by_filter", + kind=inspect.Parameter.POSITIONAL_OR_KEYWORD, + default=Depends(), + annotation=OrderByFilter, + ) + annotations["order_by_filter"] = OrderByFilter + + if not_in_fields := config.get("not_in_fields"): + for field_def in not_in_fields: + field_def = FieldNameType(name=field_def, type_hint=str) if isinstance(field_def, str) else field_def + parameters[f"{field_def.name}_not_in_filter"] = inspect.Parameter( + name=f"{field_def.name}_not_in_filter", + kind=inspect.Parameter.POSITIONAL_OR_KEYWORD, + default=Depends(), + annotation=NotInCollectionFilter[field_def.type_hint], # type: ignore + ) + annotations[f"{field_def.name}_not_in_filter"] = NotInCollectionFilter[field_def.type_hint] # type: ignore + + if in_fields := config.get("in_fields"): + for field_def in in_fields: + field_def = FieldNameType(name=field_def, type_hint=str) if isinstance(field_def, str) else field_def + parameters[f"{field_def.name}_in_filter"] = inspect.Parameter( + name=f"{field_def.name}_in_filter", + kind=inspect.Parameter.POSITIONAL_OR_KEYWORD, + default=Depends(), + annotation=InCollectionFilter[field_def.type_hint], # type: ignore + ) + annotations[f"{field_def.name}_in_filter"] = InCollectionFilter[field_def.type_hint] # type: ignore + + def provide_filters(**kwargs: FilterTypes) -> list[FilterTypes]: + """Aggregate filter dependencies based on configuration. + + Args: + **kwargs: Filter parameters dynamically provided based on configuration. + + Returns: + List of configured filters. + """ + filters: list[FilterTypes] = [] + if id_filter := kwargs.get("id_filter"): + filters.append(id_filter) + if created_filter := kwargs.get("created_filter"): + filters.append(created_filter) + if limit_offset := kwargs.get("limit_offset_filter"): + filters.append(limit_offset) + if updated_filter := kwargs.get("updated_filter"): + filters.append(updated_filter) + if ( + (search_filter := cast("Optional[SearchFilter]", kwargs.get("search_filter"))) + and search_filter is not None # pyright: ignore[reportUnnecessaryComparison] + and search_filter.field_name is not None # pyright: ignore[reportUnnecessaryComparison] + and search_filter.value is not None # pyright: ignore[reportUnnecessaryComparison] + ): + filters.append(search_filter) + if ( + (order_by := cast("Optional[OrderByFilter]", kwargs.get("order_by_filter"))) + and order_by is not None # pyright: ignore[reportUnnecessaryComparison] + and order_by.field_name is not None # pyright: ignore[reportUnnecessaryComparison] + ): + filters.append(order_by) + + if not_in_fields := config.get("not_in_fields"): + not_in_fields = {not_in_fields} if isinstance(not_in_fields, (str, FieldNameType)) else not_in_fields + for field_def in not_in_fields: + field_def = FieldNameType(name=field_def, type_hint=str) if isinstance(field_def, str) else field_def + filter_ = kwargs.get(f"{field_def.name}_not_in_filter") + if filter_ is not None: + filters.append(filter_) + + if in_fields := config.get("in_fields"): + in_fields = {in_fields} if isinstance(in_fields, (str, FieldNameType)) else in_fields + for field_def in in_fields: + field_def = FieldNameType(name=field_def, type_hint=str) if isinstance(field_def, str) else field_def + filter_ = kwargs.get(f"{field_def.name}_in_filter") + if filter_ is not None: + filters.append(filter_) + return filters + + provide_filters.__signature__ = inspect.Signature( # type: ignore + parameters=list(parameters.values()), return_annotation=list[FilterTypes] + ) + provide_filters.__annotations__ = annotations + provide_filters.__annotations__["return"] = list[FilterTypes] + + return provide_filters diff --git a/sqlspec/extensions/flask/__init__.py b/sqlspec/extensions/flask/__init__.py new file mode 100644 index 000000000..b0dccd59f --- /dev/null +++ b/sqlspec/extensions/flask/__init__.py @@ -0,0 +1,43 @@ +from sqlspec.extensions.flask.config import DatabaseConfig +from sqlspec.extensions.flask.extension import SQLSpec +from sqlspec.extensions.flask.providers import ( + DEPENDENCY_DEFAULTS, + DependencyDefaults, + FilterConfig, + create_filter_dependencies, + provide_filters, +) +from sqlspec.extensions.flask.utils import ( + FlaskServiceMixin, + create_flask_error_response, + get_blueprint_name, + get_current_connection, + get_current_session, + get_flask_app, + get_request_endpoint, + get_sqlspec_from_flask, + is_flask_context_active, + validate_flask_context, + with_flask_session, +) + +__all__ = ( + "DEPENDENCY_DEFAULTS", + "DatabaseConfig", + "DependencyDefaults", + "FilterConfig", + "FlaskServiceMixin", + "SQLSpec", + "create_filter_dependencies", + "create_flask_error_response", + "get_blueprint_name", + "get_current_connection", + "get_current_session", + "get_flask_app", + "get_request_endpoint", + "get_sqlspec_from_flask", + "is_flask_context_active", + "provide_filters", + "validate_flask_context", + "with_flask_session", +) diff --git a/sqlspec/extensions/flask/_context.py b/sqlspec/extensions/flask/_context.py new file mode 100644 index 000000000..f8a8c807e --- /dev/null +++ b/sqlspec/extensions/flask/_context.py @@ -0,0 +1,233 @@ +"""Flask-specific context managers for SQLSpec database sessions.""" + +import contextlib +from typing import TYPE_CHECKING, Any, cast + +if TYPE_CHECKING: + from collections.abc import AsyncGenerator, Callable + + from sqlspec.config import AsyncConfigT, DatabaseConfigProtocol, DriverT, SyncConfigT + from sqlspec.typing import ConnectionT + + +__all__ = ("FlaskSessionContext", "get_flask_connection", "provide_flask_session") + + +class FlaskSessionContext: + """Context manager for Flask request-scoped database sessions. + + This context manager integrates with Flask's g object to provide + request-scoped database connections and sessions. + """ + + def __init__(self, config: "DatabaseConfigProtocol[Any, Any, Any]", connection_key: str) -> None: + """Initialize Flask session context. + + Args: + config: The database configuration. + connection_key: Key to store connection in Flask's g object. + """ + self.config = config + self.connection_key = connection_key + self._connection: ConnectionT | None = None + + def __enter__(self) -> "DriverT": + """Enter the context and return a database session. + + Returns: + Database driver/session instance. + """ + try: + from flask import g + except ImportError: + msg = "Flask is required for FlaskSessionContext" + raise RuntimeError(msg) from None + + # Check if connection already exists in Flask's g + connection = getattr(g, self.connection_key, None) + + if connection is None: + # Create new connection - this should be handled by the extension + from sqlspec.extensions.flask._providers import create_connection_provider + + connection_provider = create_connection_provider(self.config, "pool", self.connection_key) + connection = connection_provider() + setattr(g, self.connection_key, connection) + + self._connection = connection + + # Create and return session/driver + return cast("DriverT", self.config.driver_type(connection=connection)) + + def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: + """Exit the context and handle cleanup. + + Args: + exc_type: Exception type if an exception occurred. + exc_val: Exception value if an exception occurred. + exc_tb: Exception traceback if an exception occurred. + """ + # Connection cleanup is handled by Flask's teardown handlers + # We don't close it here as it might be reused within the same request + + +def get_flask_connection(config: "DatabaseConfigProtocol[Any, Any, Any]", connection_key: str) -> "ConnectionT | None": + """Get the current Flask connection from g object. + + Args: + config: The database configuration. + connection_key: Key used to store connection in Flask's g object. + + Returns: + The connection instance if available, None otherwise. + """ + try: + from flask import g + return getattr(g, connection_key, None) + except ImportError: + return None + + +@contextlib.asynccontextmanager +async def provide_flask_session( + config: "SyncConfigT | AsyncConfigT", connection_key: str = "db_connection" +) -> "AsyncGenerator[DriverT, None]": + """Async context manager for Flask database sessions. + + This provides a bridge between SQLSpec's async session management + and Flask's synchronous request context. + + Args: + config: The database configuration. + connection_key: Key used to store connection in Flask's g object. + + Yields: + Database driver/session instance. + """ + try: + from flask import g + except ImportError: + msg = "Flask is required for provide_flask_session" + raise RuntimeError(msg) from None + + # Check if connection already exists in Flask's g + connection = getattr(g, connection_key, None) + + connection_created = False + try: + if connection is None: + # Create new connection + if hasattr(config, "create_pool"): + pool = await config.create_pool() # type: ignore[attr-defined] + connection_cm = config.provide_connection(pool) # type: ignore[attr-defined] + + if hasattr(connection_cm, "__aenter__"): + connection = await connection_cm.__aenter__() + else: + connection = await connection_cm if hasattr(connection_cm, "__await__") else connection_cm + + setattr(g, connection_key, connection) + connection_created = True + else: + msg = f"Configuration {config} does not support connection creation" + raise RuntimeError(msg) + + # Create and yield session/driver + yield cast("DriverT", config.driver_type(connection=connection)) # type: ignore[attr-defined] + + finally: + # Only clean up connection if we created it + if connection_created and connection: + with contextlib.suppress(Exception): + if hasattr(connection, "close") and callable(connection.close): + await connection.close() if hasattr(connection.close, "__await__") else connection.close() + + # Remove from Flask's g object + if hasattr(g, connection_key): + delattr(g, connection_key) + + +def create_flask_request_session_provider( + config: "DatabaseConfigProtocol[Any, Any, Any]", connection_key: str +) -> "Callable[[], DriverT]": + """Create a Flask request-scoped session provider. + + Args: + config: The database configuration. + connection_key: Key used to store connection in Flask's g object. + + Returns: + A function that provides sessions within Flask request context. + """ + + def get_session() -> "DriverT": + """Get or create a database session for the current Flask request. + + Returns: + Database driver/session instance. + """ + try: + from flask import g + except ImportError: + msg = "Flask is required for request-scoped sessions" + raise RuntimeError(msg) from None + + session_key = f"_sqlspec_session_{connection_key}" + + # Check if session already exists in Flask's g + session = getattr(g, session_key, None) + if session is not None: + return session + + # Get or create connection + connection = getattr(g, connection_key, None) + if connection is None: + msg = f"No database connection available. Key: {connection_key}" + raise RuntimeError(msg) + + # Create new session + session = cast("DriverT", config.driver_type(connection=connection)) + setattr(g, session_key, session) + + return session + + return get_session + + +def setup_flask_session_cleanup(app: Any, connection_key: str, session_key: str) -> None: + """Setup Flask teardown handlers for database session cleanup. + + Args: + app: The Flask application instance. + connection_key: Key used for connection storage. + session_key: Key used for session storage. + """ + + @app.teardown_appcontext + def cleanup_database_resources(exception: "Exception | None" = None) -> None: + """Clean up database sessions and connections on request teardown. + + Args: + exception: Exception that occurred during request processing, if any. + """ + try: + from flask import g + except ImportError: + return + + # Clean up session + session_storage_key = f"_sqlspec_session_{connection_key}" + session = getattr(g, session_storage_key, None) + if session is not None: + with contextlib.suppress(Exception): + if hasattr(session, "close") and callable(session.close): + session.close() + delattr(g, session_storage_key) + + # Clean up connection (if it exists and needs cleanup) + connection = getattr(g, connection_key, None) + if connection is not None: + with contextlib.suppress(Exception): + if hasattr(connection, "close") and callable(connection.close): + connection.close() + delattr(g, connection_key) diff --git a/sqlspec/extensions/flask/_providers.py b/sqlspec/extensions/flask/_providers.py new file mode 100644 index 000000000..040181e80 --- /dev/null +++ b/sqlspec/extensions/flask/_providers.py @@ -0,0 +1,144 @@ +"""Provider functions for SQLSpec Flask integration.""" + +import contextlib +from typing import TYPE_CHECKING, Any, cast + +from sqlspec.utils.portal import PortalProvider +from sqlspec.utils.sync_tools import ensure_async_ + +if TYPE_CHECKING: + from collections.abc import AsyncGenerator, Awaitable, Callable + + from sqlspec.config import DatabaseConfigProtocol, DriverT + from sqlspec.typing import ConnectionT, PoolT + + +__all__ = ("create_connection_provider", "create_pool_provider", "create_session_provider") + + +def create_pool_provider( + config: "DatabaseConfigProtocol[Any, Any, Any]", pool_key: str +) -> "Callable[[], Awaitable[PoolT]]": + """Create provider for database pool access. + + Args: + config: The database configuration object. + pool_key: The key used to store the connection pool. + + Returns: + The pool provider function. + """ + + async def provide_pool() -> "PoolT": + """Provide the database pool. + + Returns: + The database connection pool. + """ + db_pool = await ensure_async_(config.create_pool)() + return cast("PoolT", db_pool) + + def sync_provide_pool() -> "PoolT": + """Synchronous wrapper for pool provider.""" + portal = PortalProvider() + if not portal.is_running: + portal.start() + return portal.call(provide_pool) + + # Return sync version for Flask + return sync_provide_pool # type: ignore[return-value] + + +def create_connection_provider( + config: "DatabaseConfigProtocol[Any, Any, Any]", pool_key: str, connection_key: str +) -> "Callable[[], AsyncGenerator[ConnectionT, None]]": + """Create provider for database connections. + + Args: + config: The database configuration object. + pool_key: The key used to store the connection pool. + connection_key: The key used to store the connection. + + Returns: + The connection provider function. + """ + + async def provide_connection() -> "AsyncGenerator[ConnectionT, None]": + """Provide a database connection. + + Yields: + Database connection instance. + """ + db_pool = await ensure_async_(config.create_pool)() + + try: + connection_cm = config.provide_connection(db_pool) + + # Handle both context managers and direct connections + if hasattr(connection_cm, "__aenter__"): + async with connection_cm as conn: + yield cast("ConnectionT", conn) + else: + conn = await connection_cm if hasattr(connection_cm, "__await__") else connection_cm + yield cast("ConnectionT", conn) + finally: + with contextlib.suppress(Exception): + await ensure_async_(config.close_pool)() + + def sync_provide_connection() -> "ConnectionT": + """Synchronous wrapper for connection provider.""" + + # Use the portal to get a connection synchronously + async def get_connection() -> "ConnectionT": + async for conn in provide_connection(): + return conn + return None + + portal = PortalProvider() + if not portal.is_running: + portal.start() + return portal.call(get_connection) + + # Return sync version for Flask + return sync_provide_connection # type: ignore[return-value] + + +def create_session_provider( + config: "DatabaseConfigProtocol[Any, Any, Any]", connection_key: str +) -> "Callable[[ConnectionT], AsyncGenerator[DriverT, None]]": + """Create provider for database sessions/drivers. + + Args: + config: The database configuration object. + connection_key: The key used to access the connection. + + Returns: + The session provider function. + """ + + async def provide_session(connection: "ConnectionT") -> "AsyncGenerator[DriverT, None]": + """Provide a database session/driver. + + Args: + connection: The database connection. + + Yields: + Database driver/session instance. + """ + yield cast("DriverT", config.driver_type(connection=connection)) + + def sync_provide_session(connection: "ConnectionT") -> "DriverT": + """Synchronous wrapper for session provider.""" + + async def get_session() -> "DriverT": + async for session in provide_session(connection): + return session + return None + + portal = PortalProvider() + if not portal.is_running: + portal.start() + return portal.call(get_session) + + # Return sync version for Flask + return sync_provide_session # type: ignore[return-value] diff --git a/sqlspec/extensions/flask/cli.py b/sqlspec/extensions/flask/cli.py new file mode 100644 index 000000000..ff16356b2 --- /dev/null +++ b/sqlspec/extensions/flask/cli.py @@ -0,0 +1,63 @@ +"""Flask CLI integration for SQLSpec migrations.""" + +import contextlib +from typing import TYPE_CHECKING, cast + +from flask.cli import with_appcontext + +from sqlspec.cli import add_migration_commands + +try: + import rich_click as click +except ImportError: + import click # type: ignore[no-redef] + +if TYPE_CHECKING: + from flask import Flask + + from sqlspec.extensions.flask.extension import SQLSpec + +__all__ = ("database_group", "get_database_migration_plugin") + + +def get_database_migration_plugin(app: "Flask") -> "SQLSpec": + """Retrieve the SQLSpec plugin from the Flask application extensions. + + Args: + app: The Flask application + + Returns: + The SQLSpec plugin + + Raises: + ImproperConfigurationError: If the SQLSpec plugin is not found + """ + from sqlspec.exceptions import ImproperConfigurationError + + # Check if SQLSpec was stored in app.extensions + with contextlib.suppress(AttributeError, KeyError): + if hasattr(app, "extensions") and "sqlspec" in app.extensions: + # Get the first SQLSpec configuration + for config in app.extensions["sqlspec"].values(): + if hasattr(config, "__class__") and "SQLSpec" in str(config.__class__): + return cast("SQLSpec", config) + + msg = "Failed to initialize database migrations. The required SQLSpec plugin is missing." + raise ImproperConfigurationError(msg) + + +@click.group(name="db") +@with_appcontext +def database_group() -> None: + """Manage SQLSpec database components. + + This command group provides database management commands like migrations. + """ + from flask import current_app + + # Ensure we have the SQLSpec extension + get_database_migration_plugin(current_app) + + +# Add migration commands to the database group +add_migration_commands(database_group) diff --git a/sqlspec/extensions/flask/config.py b/sqlspec/extensions/flask/config.py new file mode 100644 index 000000000..8b2e159d1 --- /dev/null +++ b/sqlspec/extensions/flask/config.py @@ -0,0 +1,218 @@ +"""Configuration classes for SQLSpec Flask integration.""" + +import contextlib +from dataclasses import dataclass, field +from typing import TYPE_CHECKING, Any, Callable, Literal, Optional, Union + +from sqlspec.exceptions import ImproperConfigurationError + +if TYPE_CHECKING: + from collections.abc import AsyncGenerator, Awaitable + + from flask import Flask + + from sqlspec.config import AsyncConfigT, DriverT, SyncConfigT + from sqlspec.typing import ConnectionT, PoolT + + +CommitMode = Literal["manual", "autocommit", "autocommit_include_redirect"] +DEFAULT_COMMIT_MODE: CommitMode = "manual" +DEFAULT_CONNECTION_KEY = "db_connection" +DEFAULT_POOL_KEY = "db_pool" +DEFAULT_SESSION_KEY = "db_session" + +__all__ = ( + "DEFAULT_COMMIT_MODE", + "DEFAULT_CONNECTION_KEY", + "DEFAULT_POOL_KEY", + "DEFAULT_SESSION_KEY", + "AsyncDatabaseConfig", + "CommitMode", + "DatabaseConfig", + "SyncDatabaseConfig", +) + + +@dataclass +class DatabaseConfig: + """Configuration for SQLSpec database integration with Flask applications.""" + + config: "Union[SyncConfigT, AsyncConfigT]" = field() # type: ignore[valid-type] # pyright: ignore[reportGeneralTypeIssues] + connection_key: str = field(default=DEFAULT_CONNECTION_KEY) + pool_key: str = field(default=DEFAULT_POOL_KEY) + session_key: str = field(default=DEFAULT_SESSION_KEY) + commit_mode: "CommitMode" = field(default=DEFAULT_COMMIT_MODE) + extra_commit_statuses: "Optional[set[int]]" = field(default=None) + extra_rollback_statuses: "Optional[set[int]]" = field(default=None) + create_all: bool = field(default=False) + + # Generated providers and handlers + connection_provider: "Optional[Callable[[], AsyncGenerator[ConnectionT, None]]]" = field( + init=False, repr=False, hash=False, default=None + ) + pool_provider: "Optional[Callable[[], Awaitable[PoolT]]]" = field(init=False, repr=False, hash=False, default=None) + session_provider: "Optional[Callable[[ConnectionT], AsyncGenerator[DriverT, None]]]" = field( + init=False, repr=False, hash=False, default=None + ) + + def __post_init__(self) -> None: + """Initialize providers after object creation.""" + if not self.config.supports_connection_pooling and self.pool_key == DEFAULT_POOL_KEY: # type: ignore[union-attr,unused-ignore] + self.pool_key = f"_{self.pool_key}_{id(self.config)}" + + # Validate commit mode + if self.commit_mode not in {"manual", "autocommit", "autocommit_include_redirect"}: + msg = f"Invalid commit mode: {self.commit_mode}" + raise ImproperConfigurationError(detail=msg) + + # Validate status code sets + if ( + self.extra_commit_statuses + and self.extra_rollback_statuses + and self.extra_commit_statuses & self.extra_rollback_statuses + ): + msg = "Extra rollback statuses and commit statuses must not share any status codes" + raise ImproperConfigurationError(msg) + + def init_app(self, app: "Flask") -> None: + """Initialize SQLSpec configuration for Flask application. + + Args: + app: The Flask application instance. + """ + from sqlspec.extensions.flask._providers import ( + create_connection_provider, + create_pool_provider, + create_session_provider, + ) + + # Create providers + self.pool_provider = create_pool_provider(self.config, self.pool_key) + self.connection_provider = create_connection_provider(self.config, self.pool_key, self.connection_key) + self.session_provider = create_session_provider(self.config, self.connection_key) + + # Register teardown handlers + app.teardown_appcontext(self._teardown_session) + + # Add before/after request handlers for transaction management + if self.commit_mode != "manual": + app.before_request(self._before_request) + app.after_request(self._after_request_factory(app)) + + # Store configuration reference in Flask app + if not hasattr(app, "extensions"): + app.extensions = {} + if "sqlspec" not in app.extensions: + app.extensions["sqlspec"] = {} + app.extensions["sqlspec"][self.pool_key] = self + + def _before_request(self) -> None: + """Set up database connection before request processing.""" + + # Flask's g object is used to store request-scoped data + # The connection will be created lazily when first accessed + + def _after_request_factory(self, app: "Flask") -> "Callable[[Any], Any]": + """Create after request handler with app context. + + Args: + app: The Flask application instance. + + Returns: + After request handler function. + """ + + def after_request(response: Any) -> Any: + """Handle transaction commit/rollback after request processing. + + Args: + response: Flask response object. + + Returns: + The response object. + """ + from flask import g + + # Get connection from Flask's g object if it exists + connection = getattr(g, self.connection_key, None) + if connection is None: + return response + + try: + should_commit = self._should_commit_transaction(response.status_code) + + if should_commit and hasattr(connection, "commit") and callable(connection.commit): + connection.commit() + elif hasattr(connection, "rollback") and callable(connection.rollback): + connection.rollback() + except Exception: + # Always try to rollback on exception + if hasattr(connection, "rollback") and callable(connection.rollback): + with contextlib.suppress(Exception): + connection.rollback() + + return response + + return after_request + + def _should_commit_transaction(self, status_code: int) -> bool: + """Determine if transaction should be committed based on status code. + + Args: + status_code: HTTP response status code. + + Returns: + True if transaction should be committed, False otherwise. + """ + http_ok = 200 + http_multiple_choices = 300 + http_bad_request = 400 + + should_commit = False + + if self.commit_mode == "autocommit": + should_commit = http_ok <= status_code < http_multiple_choices + elif self.commit_mode == "autocommit_include_redirect": + should_commit = http_ok <= status_code < http_bad_request + + # Apply extra status overrides + if self.extra_commit_statuses and status_code in self.extra_commit_statuses: + should_commit = True + elif self.extra_rollback_statuses and status_code in self.extra_rollback_statuses: + should_commit = False + + return should_commit + + def _teardown_session(self, exception: "Optional[BaseException]" = None) -> None: + """Clean up database connections and sessions at the end of request. + + Args: + exception: Exception that occurred during request processing, if any. + """ + from flask import g + + # Clean up cached session first + session_key = f"_sqlspec_session_{self.connection_key}" + session = getattr(g, session_key, None) + if session is not None: + if hasattr(session, "close") and callable(session.close): + with contextlib.suppress(Exception): + session.close() + delattr(g, session_key) + + # Close any open connection + connection = getattr(g, self.connection_key, None) + if connection is not None: + if hasattr(connection, "close") and callable(connection.close): + with contextlib.suppress(Exception): + connection.close() + delattr(g, self.connection_key) + + +# Add typed subclasses for better developer experience +class SyncDatabaseConfig(DatabaseConfig): + """Sync-specific DatabaseConfig with better typing for Flask applications.""" + + +class AsyncDatabaseConfig(DatabaseConfig): + """Async-specific DatabaseConfig with better typing for Flask applications.""" diff --git a/sqlspec/extensions/flask/extension.py b/sqlspec/extensions/flask/extension.py new file mode 100644 index 000000000..1667ee121 --- /dev/null +++ b/sqlspec/extensions/flask/extension.py @@ -0,0 +1,336 @@ +"""SQLSpec extension for Flask applications.""" + +from __future__ import annotations + +from contextlib import asynccontextmanager, suppress +from typing import TYPE_CHECKING, overload + +from sqlspec.base import SQLSpec as SQLSpecBase +from sqlspec.config import AsyncConfigT, DatabaseConfigProtocol, DriverT, SyncConfigT +from sqlspec.extensions.flask.config import DatabaseConfig +from sqlspec.utils.logging import get_logger +from sqlspec.utils.portal import PortalProvider + +if TYPE_CHECKING: + from collections.abc import AsyncGenerator + + from flask import Flask + + from sqlspec.loader import SQLFileLoader + + +logger = get_logger("extensions.flask") + +__all__ = ("SQLSpec",) + + +class SQLSpec(SQLSpecBase): + """SQLSpec integration for Flask applications.""" + + __slots__ = ("_app", "_configs", "_portal_started") + + def __init__( + self, + config: SyncConfigT | AsyncConfigT | DatabaseConfig | list[DatabaseConfig], + *, + loader: SQLFileLoader | None = None, + ) -> None: + """Initialize SQLSpec for Flask. + + Args: + config: Database configuration(s) for SQLSpec. + loader: Optional SQL file loader instance. + """ + super().__init__(loader=loader) + self._app: Flask | None = None + self._portal_started = False + + if isinstance(config, DatabaseConfigProtocol): + self._configs: list[DatabaseConfig] = [DatabaseConfig(config=config)] + elif isinstance(config, DatabaseConfig): + self._configs = [config] + else: + self._configs = config + + @property + def config(self) -> list[DatabaseConfig]: + """Return the database configurations. + + Returns: + List of database configurations. + """ + return self._configs + + def add_config(self, config: SyncConfigT | AsyncConfigT) -> type[SyncConfigT | AsyncConfigT]: + """Add a configuration instance to the registry. + + Args: + config: The configuration instance to add. + + Returns: + The type of the added configuration for use as a registry key. + """ + config_type = type(config) + self._base_configs[config_type] = config + return config_type + + def init_app(self, app: Flask) -> None: + """Initialize SQLSpec with Flask application. + + Args: + app: The Flask application instance. + """ + self._app = app + + # Start the portal provider for async operations + self._ensure_portal_started() + + # Initialize each database configuration + for db_config in self._configs: + # Add the configuration to SQLSpec base + annotation = self.add_config(db_config.config) + db_config.annotation = annotation # type: ignore[attr-defined] + + # Initialize with the app + db_config.init_app(app) + + # Register shutdown handler for the portal + self._register_shutdown_handler(app) + + def _ensure_portal_started(self) -> None: + """Ensure the portal provider is started for async operations.""" + if not self._portal_started and self._has_async_config(): + portal = PortalProvider() + if not portal.is_running: + portal.start() + self._portal_started = True + + def _has_async_config(self) -> bool: + """Check if any configurations are async. + + Returns: + True if at least one configuration supports async operations. + """ + return any(hasattr(cfg.config, "is_async") and cfg.config.is_async for cfg in self._configs) # type: ignore[attr-defined] + + def _register_shutdown_handler(self, app: Flask) -> None: + """Register application shutdown handler to clean up portal. + + Args: + app: The Flask application instance. + """ + + @app.teardown_appcontext + def shutdown_portal(exception: Exception | None = None) -> None: + """Clean up portal on application shutdown.""" + if self._portal_started: + portal = PortalProvider() + if portal.is_running: + portal.stop() + self._portal_started = False + + def get_session(self, config: SyncConfigT | AsyncConfigT | None = None) -> DriverT: + """Get a database session for the given configuration. + + Args: + config: Configuration instance to get session for. If None, uses first config. + + Returns: + Database driver/session instance. + + Raises: + KeyError: If configuration is not found. + """ + if config is None: + if not self._configs: + msg = "No database configurations available" + raise RuntimeError(msg) + config = self._configs[0].config + + # Find the database config for this configuration + db_config = None + for cfg in self._configs: + # Check if annotation attribute exists and matches, otherwise check config directly + annotation = getattr(cfg, "annotation", None) + if config in (cfg.config, annotation) or (annotation is None and config == cfg.config): + db_config = cfg + break + + if db_config is None: + msg = f"No configuration found for {config}" + raise KeyError(msg) + + from flask import g + + # Check if we already have a session in Flask's g object + session_key = f"_sqlspec_session_{db_config.connection_key}" + session = getattr(g, session_key, None) + if session is not None: + return session + + # Check if we already have a connection in Flask's g object + connection = getattr(g, db_config.connection_key, None) + if connection is None: + # Create new connection using provider + if db_config.connection_provider: + connection = db_config.connection_provider() + setattr(g, db_config.connection_key, connection) + else: + msg = f"No connection provider available for {config}" + raise RuntimeError(msg) + + # Create session using provider + if db_config.session_provider: + session = db_config.session_provider(connection) + else: + # Fallback: create driver directly + session = db_config.config.driver_type(connection=connection) # type: ignore[attr-defined] + + # Cache session in Flask's g object + setattr(g, session_key, session) + return session + + def get_async_session(self, config: AsyncConfigT | None = None) -> DriverT: + """Get an async database session for the given configuration. + + Args: + config: Async configuration instance to get session for. + + Returns: + Database driver/session instance. + + Raises: + KeyError: If configuration is not found. + ValueError: If configuration is not async. + """ + if config is not None and not (hasattr(config, "is_async") and config.is_async): # type: ignore[attr-defined] + msg = "Configuration must be async" + raise ValueError(msg) + + return self.get_session(config) + + def get_sync_session(self, config: SyncConfigT | None = None) -> DriverT: + """Get a sync database session for the given configuration. + + Args: + config: Sync configuration instance to get session for. + + Returns: + Database driver/session instance. + + Raises: + KeyError: If configuration is not found. + ValueError: If configuration is async. + """ + if config is not None and hasattr(config, "is_async") and config.is_async: # type: ignore[attr-defined] + msg = "Configuration must be sync" + raise ValueError(msg) + + return self.get_session(config) + + @overload + def provide_session(self, config: SyncConfigT) -> AsyncGenerator[DriverT, None]: ... + + @overload + def provide_session(self, config: AsyncConfigT) -> AsyncGenerator[DriverT, None]: ... + + @overload + def provide_session(self, config: type[SyncConfigT | AsyncConfigT]) -> AsyncGenerator[DriverT, None]: ... + + @asynccontextmanager + async def provide_session( + self, config: SyncConfigT | AsyncConfigT | type[SyncConfigT | AsyncConfigT] + ) -> AsyncGenerator[DriverT, None]: + """Provide a database session for the given configuration. + + Args: + config: Configuration instance or type to get session for. + + Yields: + Database driver/session instance. + + Raises: + KeyError: If configuration is not found. + """ + # Find the database config for this configuration + db_config = None + for cfg in self._configs: + # Check if annotation attribute exists and matches, otherwise check config directly + annotation = getattr(cfg, "annotation", None) + if config in (cfg.config, annotation) or (annotation is None and config == cfg.config): + db_config = cfg + break + + if db_config is None: + msg = f"No configuration found for {config}" + raise KeyError(msg) + + # Get connection and create session + if db_config.connection_provider: + connection = db_config.connection_provider() + try: + if db_config.session_provider: + yield db_config.session_provider(connection) + else: + # Fallback: create driver directly + yield db_config.config.driver_type(connection=connection) # type: ignore[attr-defined] + finally: + # Clean up connection + if hasattr(connection, "close") and callable(connection.close): + with suppress(Exception): + connection.close() + # Fallback: create session directly from config + elif hasattr(db_config.config, "create_pool"): + from sqlspec.utils.sync_tools import ensure_async_ + + pool = await ensure_async_(db_config.config.create_pool)() # type: ignore[attr-defined] + connection_cm = db_config.config.provide_connection(pool) # type: ignore[attr-defined] + + try: + if hasattr(connection_cm, "__aenter__"): + async with connection_cm as conn: + yield db_config.config.driver_type(connection=conn) # type: ignore[attr-defined] + elif hasattr(connection_cm, "__enter__"): + # Sync context manager + with connection_cm as conn: + yield db_config.config.driver_type(connection=conn) # type: ignore[attr-defined] + else: + # Not a context manager, try to use as connection directly + conn = await connection_cm if hasattr(connection_cm, "__await__") else connection_cm + try: + yield db_config.config.driver_type(connection=conn) # type: ignore[attr-defined] + finally: + if hasattr(conn, "close") and callable(conn.close): + with suppress(Exception): + if hasattr(conn.close, "__await__"): + await conn.close() + else: + conn.close() + finally: + if hasattr(db_config.config, "close_pool"): + with suppress(Exception): + await ensure_async_(db_config.config.close_pool)() # type: ignore[attr-defined] + else: + msg = f"Configuration {config} does not support async session creation" + raise RuntimeError(msg) + + def get_annotation( + self, key: str | SyncConfigT | AsyncConfigT | type[SyncConfigT | AsyncConfigT] + ) -> type[SyncConfigT | AsyncConfigT]: + """Return the annotation for the given configuration. + + Args: + key: The configuration instance, type, or key to lookup. + + Returns: + The annotation for the configuration. + + Raises: + KeyError: If no configuration is found for the given key. + """ + for cfg in self._configs: + annotation = getattr(cfg, "annotation", None) + if key in {cfg.config, annotation, cfg.connection_key, cfg.pool_key}: + return annotation or type(cfg.config) # type: ignore[attr-defined] + msg = f"No configuration found for {key}" + raise KeyError(msg) diff --git a/sqlspec/extensions/flask/utils.py b/sqlspec/extensions/flask/utils.py new file mode 100644 index 000000000..58b00fd43 --- /dev/null +++ b/sqlspec/extensions/flask/utils.py @@ -0,0 +1,260 @@ +"""Utility mixins and helpers for SQLSpec Flask integration.""" + +import contextlib +from typing import TYPE_CHECKING, Any, Optional + +if TYPE_CHECKING: + from collections.abc import AsyncGenerator + + from sqlspec.config import DatabaseConfigProtocol, DriverT + from sqlspec.extensions.flask.config import DatabaseConfig + +__all__ = ( + "FlaskServiceMixin", + "get_current_connection", + "get_current_session", + "get_flask_app", + "is_flask_context_active", + "with_flask_session", +) + + +class FlaskServiceMixin: + """Mixin providing Flask-specific utilities for SQLSpec services.""" + + def jsonify(self, data: Any, status: int = 200, **kwargs: Any) -> Any: + """Create a JSON response using Flask's response system. + + Args: + data: Data to serialize to JSON. + status: HTTP status code for the response. + **kwargs: Additional arguments to pass to the response. + + Returns: + Flask response object with JSON content. + """ + from flask import jsonify as flask_jsonify + + response = flask_jsonify(data) + response.status_code = status + + # Apply any additional response modifications + for key, value in kwargs.items(): + setattr(response, key, value) + + return response + + def get_request_args(self) -> dict[str, Any]: + """Get request arguments from Flask request. + + Returns: + Dictionary of request arguments. + """ + try: + from flask import request + + return dict(request.args) if hasattr(request, "args") else {} + except (ImportError, RuntimeError): + return {} + + def get_request_json(self) -> dict[str, Any]: + """Get JSON data from Flask request. + + Returns: + Dictionary of JSON data, empty if not available. + """ + try: + from flask import request + + return request.get_json() or {} if hasattr(request, "get_json") else {} + except (ImportError, RuntimeError): + return {} + + def get_session_for_config(self, config: "DatabaseConfig") -> Optional["DriverT"]: + """Get database session for a specific configuration. + + Args: + config: The database configuration. + + Returns: + Database session if available, None otherwise. + """ + try: + from flask import g + + connection = getattr(g, config.connection_key, None) + if connection and config.session_provider: + return config.session_provider(connection) + except (ImportError, RuntimeError): + return None + else: + return None + + +def get_current_session(connection_key: str = "db_connection") -> Optional["DriverT"]: + """Get the current database session from Flask's g object. + + Args: + connection_key: Key used to identify the connection. + + Returns: + Database session if available, None otherwise. + """ + try: + from flask import g + + session_key = f"_sqlspec_session_{connection_key}" + return getattr(g, session_key, None) + except (ImportError, RuntimeError): + return None + + +def get_current_connection(connection_key: str = "db_connection") -> Any: + """Get the current database connection from Flask's g object. + + Args: + connection_key: Key used to identify the connection. + + Returns: + Database connection if available, None otherwise. + """ + try: + from flask import g + + return getattr(g, connection_key, None) + except (ImportError, RuntimeError): + return None + + +def get_flask_app() -> Optional[Any]: + """Get the current Flask application instance. + + Returns: + Flask app instance if available, None otherwise. + """ + try: + from flask import current_app + + return current_app._get_current_object() + except (ImportError, RuntimeError): + return None + + +def is_flask_context_active() -> bool: + """Check if Flask application context is active. + + Returns: + True if Flask context is active, False otherwise. + """ + try: + from flask import has_app_context, has_request_context + + return has_app_context() or has_request_context() + except (ImportError, RuntimeError): + return False + + +@contextlib.asynccontextmanager +async def with_flask_session( + config: "DatabaseConfigProtocol[Any, Any, Any]", connection_key: str = "db_connection" +) -> "AsyncGenerator[DriverT, None]": + """Context manager for working with Flask database sessions. + + Args: + config: The database configuration. + connection_key: Key used to store connection in Flask's g object. + + Yields: + Database session instance. + """ + from sqlspec.extensions.flask._context import provide_flask_session + + async with provide_flask_session(config, connection_key) as session: + yield session + + +def get_sqlspec_from_flask(app: Any) -> Optional[Any]: + """Get SQLSpec extension from Flask app extensions. + + Args: + app: Flask application instance. + + Returns: + SQLSpec extension instance if found, None otherwise. + """ + if not hasattr(app, "extensions"): + return None + + extensions = app.extensions + if "sqlspec" not in extensions: + return None + + # Return the first SQLSpec configuration found + for config in extensions["sqlspec"].values(): + if hasattr(config, "__class__") and "SQLSpec" in str(config.__class__): + return config + + return None + + +def validate_flask_context() -> None: + """Validate that Flask context is available. + + Raises: + RuntimeError: If Flask context is not available. + """ + if not is_flask_context_active(): + msg = "This operation requires an active Flask context" + raise RuntimeError(msg) + + +def get_blueprint_name() -> Optional[str]: + """Get the current blueprint name from Flask request context. + + Returns: + Blueprint name if available, None otherwise. + """ + try: + from flask import request + + return request.blueprint if hasattr(request, "blueprint") else None + except (ImportError, RuntimeError): + return None + + +def get_request_endpoint() -> Optional[str]: + """Get the current request endpoint from Flask context. + + Returns: + Endpoint name if available, None otherwise. + """ + try: + from flask import request + + return request.endpoint if hasattr(request, "endpoint") else None + except (ImportError, RuntimeError): + return None + + +def create_flask_error_response(error: Exception, status_code: int = 500) -> Any: + """Create a Flask error response from an exception. + + Args: + error: The exception that occurred. + status_code: HTTP status code for the response. + + Returns: + Flask response object with error information. + """ + try: + from flask import jsonify + + error_data = {"error": str(error), "type": error.__class__.__name__, "status_code": status_code} + + response = jsonify(error_data) + response.status_code = status_code + except ImportError: + # Fallback if Flask is not available + return {"error": str(error), "status_code": status_code} + else: + return response diff --git a/sqlspec/extensions/sanic/__init__.py b/sqlspec/extensions/sanic/__init__.py new file mode 100644 index 000000000..aa17347d9 --- /dev/null +++ b/sqlspec/extensions/sanic/__init__.py @@ -0,0 +1,23 @@ +from sqlspec.extensions.sanic.config import DatabaseConfig +from sqlspec.extensions.sanic.extension import SQLSpec +from sqlspec.extensions.sanic.providers import ( + create_filter_provider, + create_service_provider, + provide_connection, + provide_filters, + provide_pool, + provide_service, + provide_session, +) + +__all__ = ( + "DatabaseConfig", + "SQLSpec", + "create_filter_provider", + "create_service_provider", + "provide_connection", + "provide_filters", + "provide_pool", + "provide_service", + "provide_session", +) diff --git a/sqlspec/extensions/sanic/_middleware.py b/sqlspec/extensions/sanic/_middleware.py new file mode 100644 index 000000000..781a982dd --- /dev/null +++ b/sqlspec/extensions/sanic/_middleware.py @@ -0,0 +1,174 @@ +"""Middleware system for SQLSpec Sanic integration.""" + +import contextlib +from typing import TYPE_CHECKING, Any + +from sqlspec.utils.logging import get_logger +from sqlspec.utils.sync_tools import ensure_async_ + +if TYPE_CHECKING: + from sqlspec.extensions.sanic.config import DatabaseConfig + +logger = get_logger("extensions.sanic.middleware") + +__all__ = ("SessionMiddleware",) + + +class SessionMiddleware: + """Session middleware for managing database sessions in Sanic requests.""" + + __slots__ = ("_connection_key", "_session_key", "database_config") + + def __init__(self, database_config: "DatabaseConfig") -> None: + """Initialize session middleware. + + Args: + database_config: The database configuration instance. + """ + self.database_config = database_config + self._connection_key = database_config.connection_key + self._session_key = database_config.session_key + + async def before_request(self, request: Any) -> None: + """Set up database connection and session before request processing. + + Args: + request: The Sanic request object. + """ + logger.debug("Setting up database connection for request %s", request.id) + + try: + # Only create connection if it doesn't exist + if not hasattr(request.ctx, self._connection_key): + # Get connection from provider if available + if self.database_config.connection_provider: + connection_gen = self.database_config.connection_provider() + connection = await connection_gen.__anext__() + setattr(request.ctx, self._connection_key, connection) + # Store the generator for cleanup + setattr(request.ctx, f"_{self._connection_key}_gen", connection_gen) + else: + # Fallback: create connection directly + pool = await ensure_async_(self.database_config.config.create_pool)() + connection_cm = self.database_config.config.provide_connection(pool) + if hasattr(connection_cm, "__aenter__"): + connection = await connection_cm.__aenter__() + setattr(request.ctx, self._connection_key, connection) + setattr(request.ctx, f"_{self._connection_key}_cm", connection_cm) + else: + connection = await connection_cm if hasattr(connection_cm, "__await__") else connection_cm + setattr(request.ctx, self._connection_key, connection) + + logger.debug("Database connection established for request %s", request.id) + + # Create session if provider is available and session doesn't exist + if self.database_config.session_provider and not hasattr(request.ctx, self._session_key): + connection = getattr(request.ctx, self._connection_key) + session_gen = self.database_config.session_provider(connection) + session = await session_gen.__anext__() + setattr(request.ctx, self._session_key, session) + setattr(request.ctx, f"_{self._session_key}_gen", session_gen) + logger.debug("Database session created for request %s", request.id) + + except Exception: + logger.exception("Failed to set up database connection/session for request %s", request.id) + raise + + async def after_response(self, request: Any, response: Any) -> None: + """Handle transaction commit/rollback and cleanup after response processing. + + Args: + request: The Sanic request object. + response: The Sanic response object. + """ + logger.debug("Cleaning up database resources for request %s", request.id) + + # Handle session cleanup + if hasattr(request.ctx, f"_{self._session_key}_gen"): + session_gen = getattr(request.ctx, f"_{self._session_key}_gen") + with contextlib.suppress(Exception): + await session_gen.__anext__() # This should raise StopAsyncIteration + with contextlib.suppress(Exception): + delattr(request.ctx, f"_{self._session_key}_gen") + + if hasattr(request.ctx, self._session_key): + with contextlib.suppress(Exception): + delattr(request.ctx, self._session_key) + + # Handle transaction management + connection = getattr(request.ctx, self._connection_key, None) + if connection is not None: + try: + should_commit = self._should_commit_transaction(response.status) + + if should_commit and hasattr(connection, "commit") and callable(connection.commit): + await ensure_async_(connection.commit)() + logger.debug("Transaction committed for request %s", request.id) + elif hasattr(connection, "rollback") and callable(connection.rollback): + await ensure_async_(connection.rollback)() + logger.debug("Transaction rolled back for request %s", request.id) + except Exception: + logger.exception("Error during transaction handling for request %s", request.id) + # Always try to rollback on exception + if hasattr(connection, "rollback") and callable(connection.rollback): + with contextlib.suppress(Exception): + await ensure_async_(connection.rollback)() + + # Handle connection cleanup + if hasattr(request.ctx, f"_{self._connection_key}_gen"): + connection_gen = getattr(request.ctx, f"_{self._connection_key}_gen") + with contextlib.suppress(Exception): + await connection_gen.__anext__() # This should raise StopAsyncIteration + with contextlib.suppress(Exception): + delattr(request.ctx, f"_{self._connection_key}_gen") + + if hasattr(request.ctx, f"_{self._connection_key}_cm"): + connection_cm = getattr(request.ctx, f"_{self._connection_key}_cm") + with contextlib.suppress(Exception): + await connection_cm.__aexit__(None, None, None) + with contextlib.suppress(Exception): + delattr(request.ctx, f"_{self._connection_key}_cm") + + if hasattr(request.ctx, self._connection_key): + connection = getattr(request.ctx, self._connection_key) + if hasattr(connection, "close") and callable(connection.close): + with contextlib.suppress(Exception): + await ensure_async_(connection.close)() + with contextlib.suppress(Exception): + delattr(request.ctx, self._connection_key) + + logger.debug("Database resources cleaned up for request %s", request.id) + + def _should_commit_transaction(self, status_code: int) -> bool: + """Determine if transaction should be committed based on status code. + + Args: + status_code: HTTP response status code. + + Returns: + True if transaction should be committed, False otherwise. + """ + if self.database_config.commit_mode == "manual": + return False + + http_ok = 200 + http_multiple_choices = 300 + http_bad_request = 400 + + should_commit = False + + if self.database_config.commit_mode == "autocommit": + should_commit = http_ok <= status_code < http_multiple_choices + elif self.database_config.commit_mode == "autocommit_include_redirect": + should_commit = http_ok <= status_code < http_bad_request + + # Apply extra status overrides + if self.database_config.extra_commit_statuses and status_code in self.database_config.extra_commit_statuses: + should_commit = True + elif ( + self.database_config.extra_rollback_statuses + and status_code in self.database_config.extra_rollback_statuses + ): + should_commit = False + + return should_commit diff --git a/sqlspec/extensions/sanic/_providers.py b/sqlspec/extensions/sanic/_providers.py new file mode 100644 index 000000000..57f6ca2c6 --- /dev/null +++ b/sqlspec/extensions/sanic/_providers.py @@ -0,0 +1,106 @@ +"""Provider functions for SQLSpec Sanic integration.""" + +import contextlib +from typing import TYPE_CHECKING, Any, cast + +from sqlspec.utils.sync_tools import ensure_async_ + +if TYPE_CHECKING: + from collections.abc import AsyncGenerator, Awaitable, Callable + + from sqlspec.config import DatabaseConfigProtocol, DriverT + from sqlspec.typing import ConnectionT, PoolT + + +__all__ = ("create_connection_provider", "create_pool_provider", "create_session_provider") + + +def create_pool_provider( + config: "DatabaseConfigProtocol[Any, Any, Any]", pool_key: str +) -> "Callable[[], Awaitable[PoolT]]": + """Create provider for database pool access. + + Args: + config: The database configuration object. + pool_key: The key used to store the connection pool. + + Returns: + The pool provider function. + """ + + async def provide_pool() -> "PoolT": + """Provide the database pool. + + Returns: + The database connection pool. + """ + db_pool = await ensure_async_(config.create_pool)() + return cast("PoolT", db_pool) + + return provide_pool + + +def create_connection_provider( + config: "DatabaseConfigProtocol[Any, Any, Any]", pool_key: str, connection_key: str +) -> "Callable[[], AsyncGenerator[ConnectionT, None]]": + """Create provider for database connections. + + Args: + config: The database configuration object. + pool_key: The key used to store the connection pool. + connection_key: The key used to store the connection. + + Returns: + The connection provider function. + """ + + async def provide_connection() -> "AsyncGenerator[ConnectionT, None]": + """Provide a database connection. + + Yields: + Database connection instance. + """ + db_pool = await ensure_async_(config.create_pool)() + + try: + connection_cm = config.provide_connection(db_pool) + + # Handle both context managers and direct connections + if hasattr(connection_cm, "__aenter__"): + async with connection_cm as conn: + yield cast("ConnectionT", conn) + else: + conn = await connection_cm if hasattr(connection_cm, "__await__") else connection_cm + yield cast("ConnectionT", conn) + finally: + with contextlib.suppress(Exception): + await ensure_async_(config.close_pool)() + + return provide_connection + + +def create_session_provider( + config: "DatabaseConfigProtocol[Any, Any, Any]", connection_key: str +) -> "Callable[[ConnectionT], AsyncGenerator[DriverT, None]]": + """Create provider for database sessions/drivers. + + Args: + config: The database configuration object. + connection_key: The key used to access the connection. + + Returns: + The session provider function. + """ + + async def provide_session(connection: "ConnectionT") -> "AsyncGenerator[DriverT, None]": + """Provide a database session/driver. + + Args: + connection: The database connection. + + Yields: + Database driver/session instance. + """ + yield cast("DriverT", config.driver_type(connection=connection)) + + return provide_session diff --git a/sqlspec/extensions/sanic/cli.py b/sqlspec/extensions/sanic/cli.py new file mode 100644 index 000000000..f1e6c322d --- /dev/null +++ b/sqlspec/extensions/sanic/cli.py @@ -0,0 +1,353 @@ +"""CLI integration for SQLSpec Sanic extension.""" + +import sys +from pathlib import Path +from typing import TYPE_CHECKING, Optional + +import click + +from sqlspec.utils.logging import get_logger + +if TYPE_CHECKING: + from sanic import Sanic + + from sqlspec.extensions.sanic.config import DatabaseConfig + from sqlspec.extensions.sanic.extension import SQLSpec + +logger = get_logger("extensions.sanic.cli") + +__all__ = ("database_group", "init_database_commands") + + +@click.group(name="database") +def database_group() -> None: + """Database management commands for SQLSpec Sanic integration.""" + + +@database_group.command("init") +@click.option("--config", "-c", help="Configuration module path") +@click.option("--app", "-a", help="Sanic application path") +def init_database(config: Optional[str], app: Optional[str]) -> None: + """Initialize database schemas and tables. + + Args: + config: Path to configuration module. + app: Path to Sanic application. + """ + click.echo("Initializing database schemas...") + + try: + sqlspec_instance = _get_sqlspec_instance(config, app) + if sqlspec_instance is None: + click.echo("Error: Could not find SQLSpec instance", err=True) + sys.exit(1) + + # Initialize all configured databases + for db_config in sqlspec_instance.config: + _initialize_database(db_config) + click.echo(f"✓ Initialized database: {db_config.connection_key}") + + click.echo("Database initialization completed successfully!") + + except Exception as e: + logger.exception("Database initialization failed") + click.echo(f"Error: {e}", err=True) + sys.exit(1) + + +@database_group.command("migrate") +@click.option("--config", "-c", help="Configuration module path") +@click.option("--app", "-a", help="Sanic application path") +@click.option("--revision", "-r", help="Target revision") +@click.option("--sql", is_flag=True, help="Generate SQL only") +def migrate_database(config: Optional[str], app: Optional[str], revision: Optional[str], sql: bool) -> None: + """Run database migrations. + + Args: + config: Path to configuration module. + app: Path to Sanic application. + revision: Target revision to migrate to. + sql: Generate SQL scripts only without executing. + """ + click.echo("Running database migrations...") + + try: + sqlspec_instance = _get_sqlspec_instance(config, app) + if sqlspec_instance is None: + click.echo("Error: Could not find SQLSpec instance", err=True) + sys.exit(1) + + # Run migrations for all configured databases + for db_config in sqlspec_instance.config: + _run_migrations(db_config, revision, sql) + click.echo(f"✓ Migrated database: {db_config.connection_key}") + + click.echo("Database migrations completed successfully!") + + except Exception as e: + logger.exception("Database migration failed") + click.echo(f"Error: {e}", err=True) + sys.exit(1) + + +@database_group.command("upgrade") +@click.option("--config", "-c", help="Configuration module path") +@click.option("--app", "-a", help="Sanic application path") +@click.option("--revision", "-r", help="Target revision", default="head") +def upgrade_database(config: Optional[str], app: Optional[str], revision: str) -> None: + """Upgrade database to latest or specified revision. + + Args: + config: Path to configuration module. + app: Path to Sanic application. + revision: Target revision to upgrade to. + """ + click.echo(f"Upgrading database to revision: {revision}") + + try: + sqlspec_instance = _get_sqlspec_instance(config, app) + if sqlspec_instance is None: + click.echo("Error: Could not find SQLSpec instance", err=True) + sys.exit(1) + + for db_config in sqlspec_instance.config: + _upgrade_database(db_config, revision) + click.echo(f"✓ Upgraded database: {db_config.connection_key}") + + click.echo("Database upgrade completed successfully!") + + except Exception as e: + logger.exception("Database upgrade failed") + click.echo(f"Error: {e}", err=True) + sys.exit(1) + + +@database_group.command("downgrade") +@click.option("--config", "-c", help="Configuration module path") +@click.option("--app", "-a", help="Sanic application path") +@click.option("--revision", "-r", required=True, help="Target revision") +def downgrade_database(config: Optional[str], app: Optional[str], revision: str) -> None: + """Downgrade database to specified revision. + + Args: + config: Path to configuration module. + app: Path to Sanic application. + revision: Target revision to downgrade to. + """ + click.echo(f"Downgrading database to revision: {revision}") + + try: + sqlspec_instance = _get_sqlspec_instance(config, app) + if sqlspec_instance is None: + click.echo("Error: Could not find SQLSpec instance", err=True) + sys.exit(1) + + for db_config in sqlspec_instance.config: + _downgrade_database(db_config, revision) + click.echo(f"✓ Downgraded database: {db_config.connection_key}") + + click.echo("Database downgrade completed successfully!") + + except Exception as e: + logger.exception("Database downgrade failed") + click.echo(f"Error: {e}", err=True) + sys.exit(1) + + +@database_group.command("seed") +@click.option("--config", "-c", help="Configuration module path") +@click.option("--app", "-a", help="Sanic application path") +@click.option("--file", "-f", help="Seed file path") +def seed_database(config: Optional[str], app: Optional[str], file: Optional[str]) -> None: + """Seed database with initial data. + + Args: + config: Path to configuration module. + app: Path to Sanic application. + file: Path to seed file. + """ + click.echo("Seeding database with initial data...") + + try: + sqlspec_instance = _get_sqlspec_instance(config, app) + if sqlspec_instance is None: + click.echo("Error: Could not find SQLSpec instance", err=True) + sys.exit(1) + + seed_file_path = file or "seeds/initial_data.sql" + + for db_config in sqlspec_instance.config: + _seed_database(db_config, seed_file_path) + click.echo(f"✓ Seeded database: {db_config.connection_key}") + + click.echo("Database seeding completed successfully!") + + except Exception as e: + logger.exception("Database seeding failed") + click.echo(f"Error: {e}", err=True) + sys.exit(1) + + +@database_group.command("inspect") +@click.option("--config", "-c", help="Configuration module path") +@click.option("--app", "-a", help="Sanic application path") +@click.option("--table", "-t", help="Specific table to inspect") +def inspect_database(config: Optional[str], app: Optional[str], table: Optional[str]) -> None: + """Inspect database schema and structure. + + Args: + config: Path to configuration module. + app: Path to Sanic application. + table: Specific table to inspect. + """ + click.echo("Inspecting database schema...") + + try: + sqlspec_instance = _get_sqlspec_instance(config, app) + if sqlspec_instance is None: + click.echo("Error: Could not find SQLSpec instance", err=True) + sys.exit(1) + + for db_config in sqlspec_instance.config: + schema_info = _inspect_database_schema(db_config, table) + click.echo(f"Database: {db_config.connection_key}") + click.echo(schema_info) + click.echo("") + + except Exception as e: + logger.exception("Database inspection failed") + click.echo(f"Error: {e}", err=True) + sys.exit(1) + + +def init_database_commands(app: "Sanic") -> None: + """Initialize database CLI commands for Sanic application. + + This function can be called during Sanic application setup to register + database commands with Sanic's CLI system if available. + + Args: + app: The Sanic application instance. + """ + # Store reference to app for CLI commands + if not hasattr(app.ctx, "cli_commands"): + app.ctx.cli_commands = [] + + app.ctx.cli_commands.append(database_group) + logger.debug("Database CLI commands initialized for Sanic app") + + +def _get_sqlspec_instance(config_path: Optional[str], app_path: Optional[str]) -> "Optional[SQLSpec]": + """Get SQLSpec instance from configuration or application. + + Args: + config_path: Path to configuration module. + app_path: Path to Sanic application. + + Returns: + SQLSpec instance if found, None otherwise. + """ + # Try to get SQLSpec from application context first + if app_path: + try: + app_module, app_name = app_path.rsplit(":", 1) + module = __import__(app_module, fromlist=[app_name]) + app = getattr(module, app_name) + return getattr(app.ctx, "sqlspec", None) + except Exception: + logger.debug("Could not load SQLSpec from app path: %s", app_path) + + # Try to load from configuration module + if config_path: + try: + config_module = __import__(config_path, fromlist=["sqlspec"]) + return getattr(config_module, "sqlspec", None) + except Exception: + logger.debug("Could not load SQLSpec from config path: %s", config_path) + + # Try to auto-discover + for possible_path in ["app:app", "main:app", "server:app"]: + try: + module_path, app_name = possible_path.rsplit(":", 1) + module = __import__(module_path, fromlist=[app_name]) + app = getattr(module, app_name) + sqlspec = getattr(app.ctx, "sqlspec", None) + if sqlspec: + return sqlspec + except Exception: + continue + + return None + + +def _initialize_database(db_config: "DatabaseConfig") -> None: + """Initialize database schema for given configuration. + + Args: + db_config: Database configuration instance. + """ + # This would integrate with SQLSpec's migration system + # For now, this is a placeholder for the actual implementation + logger.info("Initializing database schema for %s", db_config.connection_key) + + +def _run_migrations(db_config: "DatabaseConfig", revision: Optional[str], sql_only: bool) -> None: + """Run migrations for given database configuration. + + Args: + db_config: Database configuration instance. + revision: Target revision. + sql_only: Generate SQL only without executing. + """ + # This would integrate with SQLSpec's migration system + logger.info("Running migrations for %s to revision %s", db_config.connection_key, revision or "latest") + + +def _upgrade_database(db_config: "DatabaseConfig", revision: str) -> None: + """Upgrade database to specified revision. + + Args: + db_config: Database configuration instance. + revision: Target revision. + """ + logger.info("Upgrading database %s to revision %s", db_config.connection_key, revision) + + +def _downgrade_database(db_config: "DatabaseConfig", revision: str) -> None: + """Downgrade database to specified revision. + + Args: + db_config: Database configuration instance. + revision: Target revision. + """ + logger.info("Downgrading database %s to revision %s", db_config.connection_key, revision) + + +def _seed_database(db_config: "DatabaseConfig", seed_file: str) -> None: + """Seed database with data from file. + + Args: + db_config: Database configuration instance. + seed_file: Path to seed file. + """ + seed_path = Path(seed_file) + if not seed_path.exists(): + msg = f"Seed file not found: {seed_file}" + raise FileNotFoundError(msg) + + logger.info("Seeding database %s from %s", db_config.connection_key, seed_file) + + +def _inspect_database_schema(db_config: "DatabaseConfig", table: Optional[str]) -> str: + """Inspect database schema and return information. + + Args: + db_config: Database configuration instance. + table: Specific table to inspect. + + Returns: + String containing schema information. + """ + if table: + return f"Schema information for table '{table}' in {db_config.connection_key}" + return f"Schema information for database {db_config.connection_key}" diff --git a/sqlspec/extensions/sanic/config.py b/sqlspec/extensions/sanic/config.py new file mode 100644 index 000000000..acf251401 --- /dev/null +++ b/sqlspec/extensions/sanic/config.py @@ -0,0 +1,170 @@ +"""Configuration classes for SQLSpec Sanic integration.""" + +import contextlib +from dataclasses import dataclass, field +from typing import TYPE_CHECKING, Any, Callable, Literal, Optional, Union + +from sqlspec.exceptions import ImproperConfigurationError + +if TYPE_CHECKING: + from collections.abc import AsyncGenerator, Awaitable + + from sanic import Sanic + + from sqlspec.config import AsyncConfigT, DriverT, SyncConfigT + from sqlspec.typing import ConnectionT, PoolT + + +CommitMode = Literal["manual", "autocommit", "autocommit_include_redirect"] +DEFAULT_COMMIT_MODE: CommitMode = "manual" +DEFAULT_CONNECTION_KEY = "db_connection" +DEFAULT_POOL_KEY = "db_pool" +DEFAULT_SESSION_KEY = "db_session" + +__all__ = ( + "DEFAULT_COMMIT_MODE", + "DEFAULT_CONNECTION_KEY", + "DEFAULT_POOL_KEY", + "DEFAULT_SESSION_KEY", + "AsyncDatabaseConfig", + "CommitMode", + "DatabaseConfig", + "SyncDatabaseConfig", +) + + +@dataclass +class DatabaseConfig: + """Configuration for SQLSpec database integration with Sanic applications.""" + + config: "Union[SyncConfigT, AsyncConfigT]" = field() # type: ignore[valid-type] # pyright: ignore[reportGeneralTypeIssues] + connection_key: str = field(default=DEFAULT_CONNECTION_KEY) + pool_key: str = field(default=DEFAULT_POOL_KEY) + session_key: str = field(default=DEFAULT_SESSION_KEY) + commit_mode: "CommitMode" = field(default=DEFAULT_COMMIT_MODE) + extra_commit_statuses: "Optional[set[int]]" = field(default=None) + extra_rollback_statuses: "Optional[set[int]]" = field(default=None) + enable_middleware: bool = field(default=True) + + # Generated providers + connection_provider: "Optional[Callable[[], AsyncGenerator[ConnectionT, None]]]" = field( + init=False, repr=False, hash=False, default=None + ) + pool_provider: "Optional[Callable[[], Awaitable[PoolT]]]" = field(init=False, repr=False, hash=False, default=None) + session_provider: "Optional[Callable[[ConnectionT], AsyncGenerator[DriverT, None]]]" = field( + init=False, repr=False, hash=False, default=None + ) + + # Generated unique context keys for Sanic + engine_key: str = field(init=False, repr=False, hash=False) + session_maker_key: str = field(init=False, repr=False, hash=False) + + def __post_init__(self) -> None: + """Initialize providers and keys after object creation.""" + if not self.config.supports_connection_pooling and self.pool_key == DEFAULT_POOL_KEY: # type: ignore[union-attr,unused-ignore] + self.pool_key = f"_{self.pool_key}_{id(self.config)}" + + # Generate unique context keys + self.engine_key = f"engine_{id(self.config)}" + self.session_maker_key = f"session_maker_{id(self.config)}" + + # Validate commit mode + if self.commit_mode not in {"manual", "autocommit", "autocommit_include_redirect"}: + msg = f"Invalid commit mode: {self.commit_mode}" + raise ImproperConfigurationError(detail=msg) + + # Validate status code sets + if ( + self.extra_commit_statuses + and self.extra_rollback_statuses + and self.extra_commit_statuses & self.extra_rollback_statuses + ): + msg = "Extra rollback statuses and commit statuses must not share any status codes" + raise ImproperConfigurationError(msg) + + def init_app(self, app: "Sanic") -> None: + """Initialize SQLSpec configuration for Sanic application. + + Args: + app: The Sanic application instance. + """ + from sqlspec.extensions.sanic._providers import ( + create_connection_provider, + create_pool_provider, + create_session_provider, + ) + + # Create providers + self.pool_provider = create_pool_provider(self.config, self.pool_key) + self.connection_provider = create_connection_provider(self.config, self.pool_key, self.connection_key) + self.session_provider = create_session_provider(self.config, self.connection_key) + + # Add middleware if enabled + if self.enable_middleware: + from sqlspec.extensions.sanic._middleware import SessionMiddleware + + middleware = SessionMiddleware(self) + app.middleware("request")(middleware.before_request) + app.middleware("response")(middleware.after_response) + + # Add startup and shutdown listeners + app.listener("before_server_start")(self._startup_handler(app)) + app.listener("after_server_stop")(self._shutdown_handler(app)) + + def _startup_handler(self, app: "Sanic") -> "Callable[[Sanic, Any], Awaitable[None]]": + """Create startup handler for database pool initialization. + + Args: + app: The Sanic application instance. + + Returns: + Startup handler function. + """ + + async def startup(app: "Sanic", loop: Any) -> None: + """Initialize database pool on startup. + + Args: + app: The Sanic application instance. + loop: The event loop. + """ + from sqlspec.utils.sync_tools import ensure_async_ + + db_pool = await ensure_async_(self.config.create_pool)() + app.ctx.__dict__[self.pool_key] = db_pool + + return startup + + def _shutdown_handler(self, app: "Sanic") -> "Callable[[Sanic, Any], Awaitable[None]]": + """Create shutdown handler for database pool cleanup. + + Args: + app: The Sanic application instance. + + Returns: + Shutdown handler function. + """ + + async def shutdown(app: "Sanic", loop: Any) -> None: + """Clean up database pool on shutdown. + + Args: + app: The Sanic application instance. + loop: The event loop. + """ + from sqlspec.utils.sync_tools import ensure_async_ + + app.ctx.__dict__.pop(self.pool_key, None) + with contextlib.suppress(Exception): + await ensure_async_(self.config.close_pool)() + + return shutdown + + +# Add typed subclasses for better developer experience +class SyncDatabaseConfig(DatabaseConfig): + """Sync-specific DatabaseConfig with better typing for Sanic applications.""" + + +class AsyncDatabaseConfig(DatabaseConfig): + """Async-specific DatabaseConfig with better typing for Sanic applications.""" diff --git a/sqlspec/extensions/sanic/extension.py b/sqlspec/extensions/sanic/extension.py new file mode 100644 index 000000000..380404a14 --- /dev/null +++ b/sqlspec/extensions/sanic/extension.py @@ -0,0 +1,313 @@ +"""SQLSpec extension for Sanic applications.""" + +from __future__ import annotations + +from contextlib import asynccontextmanager +from typing import TYPE_CHECKING, Any, overload + +from sqlspec.base import SQLSpec as SQLSpecBase +from sqlspec.config import AsyncConfigT, DatabaseConfigProtocol, DriverT, SyncConfigT +from sqlspec.extensions.sanic.config import DatabaseConfig +from sqlspec.utils.logging import get_logger + +if TYPE_CHECKING: + from collections.abc import AsyncGenerator + + from sanic import Sanic + + from sqlspec.loader import SQLFileLoader + from sqlspec.typing import ConnectionT, PoolT + +logger = get_logger("extensions.sanic") + +__all__ = ("SQLSpec",) + + +class SQLSpec(SQLSpecBase): + """SQLSpec integration for Sanic applications.""" + + __slots__ = ("_app", "_configs") + + def __init__( + self, + config: SyncConfigT | AsyncConfigT | DatabaseConfig | list[DatabaseConfig], + *, + loader: SQLFileLoader | None = None, + ) -> None: + """Initialize SQLSpec for Sanic. + + Args: + config: Database configuration(s) for SQLSpec. + loader: Optional SQL file loader instance. + """ + super().__init__(loader=loader) + self._app: Sanic | None = None + + if isinstance(config, DatabaseConfigProtocol): + self._configs: list[DatabaseConfig] = [DatabaseConfig(config=config)] + elif isinstance(config, DatabaseConfig): + self._configs = [config] + else: + self._configs = config + + @property + def config(self) -> list[DatabaseConfig]: + """Return the database configurations. + + Returns: + List of database configurations. + """ + return self._configs + + def init_app(self, app: Sanic) -> None: + """Initialize SQLSpec with Sanic application. + + Args: + app: The Sanic application instance. + """ + self._app = app + + # Initialize each database configuration + for db_config in self._configs: + # Generate unique annotation type for this config + config_type = type(db_config.config) + db_config.annotation = config_type # type: ignore[attr-defined] + + # Initialize with the app + db_config.init_app(app) + + # Store reference in app context + app.ctx.sqlspec = self + + def get_session(self, request: Any, config: SyncConfigT | AsyncConfigT | None = None) -> DriverT: + """Get a database session for the given configuration from request context. + + Args: + request: The Sanic request object. + config: Configuration instance to get session for. If None, uses first config. + + Returns: + Database driver/session instance. + + Raises: + KeyError: If configuration is not found. + """ + if config is None: + if not self._configs: + msg = "No database configurations available" + raise RuntimeError(msg) + config = self._configs[0].config + + # Find the database config for this configuration + db_config = None + for cfg in self._configs: + if config in (cfg.config, getattr(cfg, "annotation", None)): + db_config = cfg + break + + if db_config is None: + msg = f"No configuration found for {config}" + raise KeyError(msg) + + # Check if we already have a session in request context + session = getattr(request.ctx, db_config.session_key, None) + if session is not None: + return session + + # Check if we have a connection in request context + connection = getattr(request.ctx, db_config.connection_key, None) + if connection is None: + msg = f"No connection available for {config}. Ensure middleware is enabled." + raise RuntimeError(msg) + + # Create driver directly using connection + return db_config.config.driver_type(connection=connection) # type: ignore[attr-defined] + + def get_engine(self, request: Any, config: SyncConfigT | AsyncConfigT | None = None) -> Any: + """Get database engine from request context. + + Args: + request: The Sanic request object. + config: Configuration instance to get engine for. + + Returns: + Database engine instance. + + Raises: + KeyError: If configuration is not found. + """ + if config is None: + if not self._configs: + msg = "No database configurations available" + raise RuntimeError(msg) + config = self._configs[0].config + + # Find the database config for this configuration + db_config = None + for cfg in self._configs: + if config in (cfg.config, getattr(cfg, "annotation", None)): + db_config = cfg + break + + if db_config is None: + msg = f"No configuration found for {config}" + raise KeyError(msg) + + return getattr(request.app.ctx, db_config.pool_key, None) + + @overload + def provide_session(self, config: SyncConfigT) -> AsyncGenerator[DriverT, None]: ... + + @overload + def provide_session(self, config: AsyncConfigT) -> AsyncGenerator[DriverT, None]: ... + + @overload + def provide_session(self, config: type[SyncConfigT | AsyncConfigT]) -> AsyncGenerator[DriverT, None]: ... + + @asynccontextmanager + async def provide_session( + self, config: SyncConfigT | AsyncConfigT | type[SyncConfigT | AsyncConfigT] + ) -> AsyncGenerator[DriverT, None]: + """Provide a database session for the given configuration. + + Args: + config: Configuration instance or type to get session for. + + Yields: + Database driver/session instance. + + Raises: + KeyError: If configuration is not found. + """ + # Find the database config for this configuration + db_config = None + for cfg in self._configs: + if config in (cfg.config, getattr(cfg, "annotation", None)): + db_config = cfg + break + + if db_config is None: + msg = f"No configuration found for {config}" + raise KeyError(msg) + + # Get connection and create session + if db_config.connection_provider: + async with db_config.connection_provider() as connection: + if db_config.session_provider: + async with db_config.session_provider(connection) as session: + yield session + else: + # Fallback: create driver directly + yield db_config.config.driver_type(connection=connection) # type: ignore[attr-defined] + else: + # Fallback: use config's provide_session method + with db_config.config.provide_session() as session: + yield session + + @overload + def provide_connection(self, config: SyncConfigT) -> AsyncGenerator[ConnectionT, None]: ... + + @overload + def provide_connection(self, config: AsyncConfigT) -> AsyncGenerator[ConnectionT, None]: ... + + @overload + def provide_connection(self, config: type[SyncConfigT | AsyncConfigT]) -> AsyncGenerator[ConnectionT, None]: ... + + @asynccontextmanager + async def provide_connection( + self, config: SyncConfigT | AsyncConfigT | type[SyncConfigT | AsyncConfigT] + ) -> AsyncGenerator[ConnectionT, None]: + """Provide a database connection for the given configuration. + + Args: + config: Configuration instance or type to get connection for. + + Yields: + Database connection instance. + + Raises: + KeyError: If configuration is not found. + """ + # Find the database config for this configuration + db_config = None + for cfg in self._configs: + if config in (cfg.config, getattr(cfg, "annotation", None)): + db_config = cfg + break + + if db_config is None: + msg = f"No configuration found for {config}" + raise KeyError(msg) + + # Get connection + if db_config.connection_provider: + async with db_config.connection_provider() as connection: + yield connection + else: + # Fallback: use config's provide_connection method + with db_config.config.provide_connection() as connection: + yield connection + + @overload + def provide_pool(self, config: SyncConfigT) -> PoolT: ... + + @overload + def provide_pool(self, config: AsyncConfigT) -> PoolT: ... + + @overload + def provide_pool(self, config: type[SyncConfigT | AsyncConfigT]) -> PoolT: ... + + async def provide_pool(self, config: SyncConfigT | AsyncConfigT | type[SyncConfigT | AsyncConfigT]) -> PoolT: + """Provide a database pool for the given configuration. + + Args: + config: Configuration instance or type to get pool for. + + Returns: + Database connection pool. + + Raises: + KeyError: If configuration is not found. + """ + # Find the database config for this configuration + db_config = None + for cfg in self._configs: + if config in (cfg.config, getattr(cfg, "annotation", None)): + db_config = cfg + break + + if db_config is None: + msg = f"No configuration found for {config}" + raise KeyError(msg) + + # Get pool + if db_config.pool_provider: + return await db_config.pool_provider() + # Fallback: create pool directly + from sqlspec.utils.sync_tools import ensure_async_ + + return await ensure_async_(db_config.config.create_pool)() + + def get_annotation( + self, key: str | SyncConfigT | AsyncConfigT | type[SyncConfigT | AsyncConfigT] + ) -> type[SyncConfigT | AsyncConfigT]: + """Return the annotation for the given configuration. + + Args: + key: The configuration instance, type, or key to lookup. + + Returns: + The annotation for the configuration. + + Raises: + KeyError: If no configuration is found for the given key. + """ + for cfg in self._configs: + annotation = getattr(cfg, "annotation", None) + if key in (cfg.config, annotation, cfg.connection_key, cfg.pool_key): + if annotation is None: + msg = "Annotation not set for configuration. Ensure the extension has been initialized." + raise AttributeError(msg) + return annotation + msg = f"No configuration found for {key}" + raise KeyError(msg) diff --git a/sqlspec/extensions/sanic/providers.py b/sqlspec/extensions/sanic/providers.py new file mode 100644 index 000000000..0f8afcdd3 --- /dev/null +++ b/sqlspec/extensions/sanic/providers.py @@ -0,0 +1,157 @@ +"""Provider system for SQLSpec Sanic integration. + +Provides connection, pool, and session providers for Sanic applications. +""" + +from typing import TYPE_CHECKING, Callable, Optional, Union, cast + +from sqlspec.utils.logging import get_logger + +if TYPE_CHECKING: + from sanic import Request + + from sqlspec.config import AsyncConfigT, DriverT, SyncConfigT + from sqlspec.extensions.sanic.config import DatabaseConfig + from sqlspec.typing import ConnectionT, PoolT + +logger = get_logger("extensions.sanic.providers") + +__all__ = ( + "provide_connection", + "provide_pool", + "provide_session", +) + + + +def provide_connection( + config: "Optional[Union[SyncConfigT, AsyncConfigT, DatabaseConfig]]" = None, +) -> "Callable[[Request], ConnectionT]": + """Create a connection provider for direct database connection access. + + Args: + config: Optional database configuration. + + Returns: + A provider function that returns database connections. + """ + def provider(request: "Request") -> "ConnectionT": + """Provide database connection from request context. + + Args: + request: The Sanic request object. + + Returns: + Database connection instance. + """ + sqlspec = getattr(request.app.ctx, "sqlspec", None) + if sqlspec is None: + msg = "SQLSpec not initialized in application context" + raise RuntimeError(msg) + + # Determine which config to use + active_config = config + if active_config is None: + if not sqlspec._configs: + msg = "No database configurations available" + raise RuntimeError(msg) + active_config = sqlspec._configs[0] + + # Find the database config for this configuration + db_config = None + for cfg in sqlspec._configs: + if active_config in (cfg.config, getattr(cfg, "annotation", None)): + db_config = cfg + break + + if db_config is None: + msg = f"No configuration found for {active_config}" + raise KeyError(msg) + + # Get connection from request context + connection = getattr(request.ctx, db_config.connection_key, None) + if connection is None: + msg = f"No connection available for {active_config}. Ensure middleware is enabled." + raise RuntimeError(msg) + + return cast("ConnectionT", connection) + + return provider + + +def provide_pool( + config: "Optional[Union[SyncConfigT, AsyncConfigT, DatabaseConfig]]" = None, +) -> "Callable[[Request], PoolT]": + """Create a pool provider for database connection pool access. + + Args: + config: Optional database configuration. + + Returns: + A provider function that returns database connection pools. + """ + def provider(request: "Request") -> "PoolT": + """Provide database connection pool from app context. + + Args: + request: The Sanic request object. + + Returns: + Database connection pool instance. + """ + sqlspec = getattr(request.app.ctx, "sqlspec", None) + if sqlspec is None: + msg = "SQLSpec not initialized in application context" + raise RuntimeError(msg) + + # Determine which config to use + active_config = config + if active_config is None: + if not sqlspec._configs: + msg = "No database configurations available" + raise RuntimeError(msg) + active_config = sqlspec._configs[0] + + # Get pool from app context + pool = sqlspec.get_engine(request, active_config.config if hasattr(active_config, "config") else active_config) + if pool is None: + msg = f"No pool available for {active_config}" + raise RuntimeError(msg) + + return cast("PoolT", pool) + + return provider + + +def provide_session( + config: "Optional[Union[SyncConfigT, AsyncConfigT, DatabaseConfig]]" = None, +) -> "Callable[[Request], DriverT]": + """Create a session provider for database session/driver access. + + Args: + config: Optional database configuration. + + Returns: + A provider function that returns database sessions/drivers. + """ + def provider(request: "Request") -> "DriverT": + """Provide database session/driver from request context. + + Args: + request: The Sanic request object. + + Returns: + Database session/driver instance. + """ + sqlspec = getattr(request.app.ctx, "sqlspec", None) + if sqlspec is None: + msg = "SQLSpec not initialized in application context" + raise RuntimeError(msg) + + # Get session using SQLSpec's method + session = sqlspec.get_session(request, config.config if hasattr(config, "config") else config) + return cast("DriverT", session) + + return provider + + diff --git a/sqlspec/extensions/starlette/__init__.py b/sqlspec/extensions/starlette/__init__.py new file mode 100644 index 000000000..a1e0c98ef --- /dev/null +++ b/sqlspec/extensions/starlette/__init__.py @@ -0,0 +1,20 @@ +from sqlspec.extensions.starlette._middleware import SessionMiddleware, create_session_middleware +from sqlspec.extensions.starlette.config import DatabaseConfig +from sqlspec.extensions.starlette.extension import SQLSpec +from sqlspec.extensions.starlette.providers import ( + FilterConfig, + create_filter_dependencies, + provide_filters, + provide_service, +) + +__all__ = ( + "DatabaseConfig", + "FilterConfig", + "SQLSpec", + "SessionMiddleware", + "create_filter_dependencies", + "create_session_middleware", + "provide_filters", + "provide_service", +) diff --git a/sqlspec/extensions/starlette/_middleware.py b/sqlspec/extensions/starlette/_middleware.py new file mode 100644 index 000000000..6d2dff5c7 --- /dev/null +++ b/sqlspec/extensions/starlette/_middleware.py @@ -0,0 +1,229 @@ +"""Middleware for SQLSpec Starlette integration.""" + +import contextlib +import uuid +from typing import TYPE_CHECKING, Any, Optional + +from starlette.middleware.base import BaseHTTPMiddleware + +from sqlspec.utils.correlation import set_correlation_id +from sqlspec.utils.logging import get_logger +from sqlspec.utils.sync_tools import ensure_async_ + +if TYPE_CHECKING: + from collections.abc import Awaitable, Callable + + from starlette.requests import Request + from starlette.responses import Response + + from sqlspec.extensions.starlette.config import CommitMode, DatabaseConfig + +logger = get_logger("extensions.starlette.middleware") + + +__all__ = ("SessionMiddleware", "create_session_middleware") + + +class SessionMiddleware(BaseHTTPMiddleware): + """Middleware for managing database sessions and transactions.""" + + def __init__( + self, + app: Any, + config: "DatabaseConfig", + commit_mode: "CommitMode" = "manual", + extra_commit_statuses: "Optional[set[int]]" = None, + extra_rollback_statuses: "Optional[set[int]]" = None, + ) -> None: + """Initialize session middleware. + + Args: + app: The ASGI application. + config: Database configuration instance. + commit_mode: Transaction commit behavior. + extra_commit_statuses: Additional status codes that trigger commits. + extra_rollback_statuses: Additional status codes that trigger rollbacks. + """ + super().__init__(app) + self.config = config + self.commit_mode = commit_mode + self.extra_commit_statuses = extra_commit_statuses or set() + self.extra_rollback_statuses = extra_rollback_statuses or set() + + async def dispatch(self, request: "Request", call_next: "Callable[[Request], Awaitable[Response]]") -> "Response": + """Handle request with session management. + + Args: + request: The incoming request. + call_next: The next middleware or endpoint. + + Returns: + The response from the application. + """ + # Set up correlation ID for request tracking + correlation_id = request.headers.get("x-correlation-id") or str(uuid.uuid4()) + set_correlation_id(correlation_id) + + if not self.config.connection_provider or not self.config.session_provider: + # If no providers, just pass through + logger.debug("No connection or session provider found, skipping middleware") + return await call_next(request) + + # Get pool from app state + pool = getattr(request.app.state, self.config.pool_key, None) + if pool is None: + logger.warning("Database pool '%s' not found in app state", self.config.pool_key) + return await call_next(request) + + # Get connection from provider + connection_gen = self.config.connection_provider() + try: + connection = await connection_gen.__anext__() + except StopAsyncIteration: + logger.exception("Connection provider exhausted") + return await call_next(request) + + # Store connection in request state + request.state.__dict__[self.config.connection_key] = connection + + # Get session from provider + session_gen = self.config.session_provider(connection) + session = None + + try: + session = await session_gen.__anext__() + + # Store session in request state + request.state.__dict__[self.config.session_key] = session + + logger.debug("Database session established for connection key: %s", self.config.connection_key) + + response = await call_next(request) + + # Handle transaction based on commit mode and response status + if self.commit_mode != "manual": + await self._handle_transaction(session or connection, response.status_code) + + except Exception: + logger.exception("Exception in request processing") + # Rollback on exception + await self._rollback_transaction(session or connection) + raise + else: + return response + finally: + # Clean up session + if session is not None: + with contextlib.suppress(StopAsyncIteration): + await session_gen.__anext__() + + # Clean up connection + with contextlib.suppress(StopAsyncIteration): + await connection_gen.__anext__() + + # Clean up request state + request.state.__dict__.pop(self.config.connection_key, None) + request.state.__dict__.pop(self.config.session_key, None) + + logger.debug("Database session and connection cleaned up") + + async def _handle_transaction(self, session_or_connection: Any, status_code: int) -> None: + """Handle transaction commit/rollback based on status code. + + Args: + session_or_connection: The database session or connection. + status_code: HTTP response status code. + """ + http_ok = 200 + http_multiple_choices = 300 + http_bad_request = 400 + + should_commit = False + + if self.commit_mode == "autocommit": + # Commit on 2xx status codes + should_commit = http_ok <= status_code < http_multiple_choices + elif self.commit_mode == "autocommit_include_redirect": + # Commit on 2xx and 3xx status codes + should_commit = http_ok <= status_code < http_bad_request + + # Apply extra status overrides + if status_code in self.extra_commit_statuses: + should_commit = True + elif status_code in self.extra_rollback_statuses: + should_commit = False + + # Execute transaction action + if should_commit: + await self._commit_transaction(session_or_connection) + logger.debug("Transaction committed for status code: %s", status_code) + else: + await self._rollback_transaction(session_or_connection) + logger.debug("Transaction rolled back for status code: %s", status_code) + + async def _commit_transaction(self, session_or_connection: Any) -> None: + """Commit transaction on session or connection. + + Args: + session_or_connection: The database session or connection. + """ + try: + if hasattr(session_or_connection, "commit") and callable(session_or_connection.commit): + await ensure_async_(session_or_connection.commit)() + elif hasattr(session_or_connection, "connection"): + # Try to commit on underlying connection if session doesn't have commit + conn = session_or_connection.connection + if hasattr(conn, "commit") and callable(conn.commit): + await ensure_async_(conn.commit)() + except Exception: + logger.exception("Error committing transaction") + # Try to rollback after failed commit + await self._rollback_transaction(session_or_connection) + + async def _rollback_transaction(self, session_or_connection: Any) -> None: + """Rollback transaction on session or connection. + + Args: + session_or_connection: The database session or connection. + """ + try: + if hasattr(session_or_connection, "rollback") and callable(session_or_connection.rollback): + await ensure_async_(session_or_connection.rollback)() + elif hasattr(session_or_connection, "connection"): + # Try to rollback on underlying connection if session doesn't have rollback + conn = session_or_connection.connection + if hasattr(conn, "rollback") and callable(conn.rollback): + await ensure_async_(conn.rollback)() + except Exception: + logger.exception("Error rolling back transaction") + + +def create_session_middleware( + config: "DatabaseConfig", + commit_mode: "CommitMode" = "manual", + extra_commit_statuses: "Optional[set[int]]" = None, + extra_rollback_statuses: "Optional[set[int]]" = None, +) -> type[SessionMiddleware]: + """Create a session middleware class. + + Args: + config: Database configuration instance. + commit_mode: Transaction commit behavior. + extra_commit_statuses: Additional status codes that trigger commits. + extra_rollback_statuses: Additional status codes that trigger rollbacks. + + Returns: + Configured session middleware class. + """ + + class ConfiguredSessionMiddleware(SessionMiddleware): + def __init__(self, app: Any) -> None: + super().__init__( + app=app, + config=config, + commit_mode=commit_mode, + extra_commit_statuses=extra_commit_statuses, + extra_rollback_statuses=extra_rollback_statuses, + ) + + return ConfiguredSessionMiddleware diff --git a/sqlspec/extensions/starlette/_providers.py b/sqlspec/extensions/starlette/_providers.py new file mode 100644 index 000000000..901f43955 --- /dev/null +++ b/sqlspec/extensions/starlette/_providers.py @@ -0,0 +1,117 @@ +"""Provider functions for SQLSpec Starlette integration.""" + +import contextlib +from typing import TYPE_CHECKING, Any, cast + +from sqlspec.utils.sync_tools import ensure_async_ + +if TYPE_CHECKING: + from collections.abc import AsyncGenerator, Awaitable, Callable + + from sqlspec.config import DatabaseConfigProtocol, DriverT + from sqlspec.typing import ConnectionT, PoolT + + +__all__ = ("create_connection_provider", "create_pool_provider", "create_session_provider") + + +def create_pool_provider( + config: "DatabaseConfigProtocol[Any, Any, Any]", pool_key: str +) -> "Callable[[], Awaitable[PoolT]]": + """Create provider for database pool access. + + Args: + config: The database configuration object. + pool_key: The key used to store the connection pool. + + Returns: + The pool provider function. + """ + + async def provide_pool() -> "PoolT": + """Provide the database pool. + + Returns: + The database connection pool. + + Raises: + ImproperConfigurationError: If the pool is not properly initialized. + """ + # For Starlette, we would need access to app.state + # This is a simplified version - in practice you'd need to pass app state + db_pool = await ensure_async_(config.create_pool)() + return cast("PoolT", db_pool) + + return provide_pool + + +def create_connection_provider( + config: "DatabaseConfigProtocol[Any, Any, Any]", pool_key: str, connection_key: str +) -> "Callable[[], AsyncGenerator[ConnectionT, None]]": + """Create provider for database connections. + + Args: + config: The database configuration object. + pool_key: The key used to store the connection pool. + connection_key: The key used to store the connection. + + Returns: + The connection provider function. + """ + + async def provide_connection() -> "AsyncGenerator[ConnectionT, None]": + """Provide a database connection. + + Yields: + Database connection instance. + + Raises: + ImproperConfigurationError: If the pool is not found. + """ + # Create a temporary pool for this connection + # In practice, you'd get this from app.state + db_pool = await ensure_async_(config.create_pool)() + + try: + connection_cm = config.provide_connection(db_pool) + + # Handle both context managers and direct connections + if hasattr(connection_cm, "__aenter__"): + async with connection_cm as conn: + yield cast("ConnectionT", conn) + else: + conn = await connection_cm if hasattr(connection_cm, "__await__") else connection_cm + yield cast("ConnectionT", conn) + finally: + # Clean up pool + with contextlib.suppress(Exception): + await ensure_async_(config.close_pool)() + + return provide_connection + + +def create_session_provider( + config: "DatabaseConfigProtocol[Any, Any, Any]", connection_key: str +) -> "Callable[[ConnectionT], AsyncGenerator[DriverT, None]]": + """Create provider for database sessions/drivers. + + Args: + config: The database configuration object. + connection_key: The key used to access the connection. + + Returns: + The session provider function. + """ + + async def provide_session(connection: "ConnectionT") -> "AsyncGenerator[DriverT, None]": + """Provide a database session/driver. + + Args: + connection: The database connection. + + Yields: + Database driver/session instance. + """ + yield cast("DriverT", config.driver_type(connection=connection)) + + return provide_session diff --git a/sqlspec/extensions/starlette/config.py b/sqlspec/extensions/starlette/config.py new file mode 100644 index 000000000..0687900c5 --- /dev/null +++ b/sqlspec/extensions/starlette/config.py @@ -0,0 +1,300 @@ +"""Configuration classes for SQLSpec Starlette integration.""" + +from dataclasses import dataclass, field +from typing import TYPE_CHECKING, Callable, Literal, Optional, Union, cast + +from sqlspec.exceptions import ImproperConfigurationError + +if TYPE_CHECKING: + from collections.abc import AsyncGenerator, Awaitable + + from starlette.applications import Starlette + from starlette.middleware.base import BaseHTTPMiddleware + from starlette.requests import Request + + from sqlspec.config import AsyncConfigT, DriverT, SyncConfigT + from sqlspec.driver import AsyncDriverAdapterBase, SyncDriverAdapterBase + from sqlspec.typing import ConnectionT, PoolT + + +CommitMode = Literal["manual", "autocommit", "autocommit_include_redirect"] +DEFAULT_COMMIT_MODE: CommitMode = "manual" +DEFAULT_CONNECTION_KEY = "db_connection" +DEFAULT_POOL_KEY = "db_pool" +DEFAULT_SESSION_KEY = "db_session" + +__all__ = ( + "DEFAULT_COMMIT_MODE", + "DEFAULT_CONNECTION_KEY", + "DEFAULT_POOL_KEY", + "DEFAULT_SESSION_KEY", + "AsyncDatabaseConfig", + "CommitMode", + "DatabaseConfig", + "SyncDatabaseConfig", +) + + +@dataclass +class DatabaseConfig: + """Configuration for SQLSpec database integration with Starlette applications.""" + + config: "Union[SyncConfigT, AsyncConfigT]" = field() # type: ignore[valid-type] # pyright: ignore[reportGeneralTypeIssues] + connection_key: str = field(default=DEFAULT_CONNECTION_KEY) + pool_key: str = field(default=DEFAULT_POOL_KEY) + session_key: str = field(default=DEFAULT_SESSION_KEY) + commit_mode: "CommitMode" = field(default=DEFAULT_COMMIT_MODE) + extra_commit_statuses: "Optional[set[int]]" = field(default=None) + extra_rollback_statuses: "Optional[set[int]]" = field(default=None) + enable_middleware: bool = field(default=True) + + # Generated middleware and providers + middleware: "Optional[BaseHTTPMiddleware]" = field(init=False, repr=False, hash=False, default=None) + connection_provider: "Optional[Callable[[], AsyncGenerator[ConnectionT, None]]]" = field( + init=False, repr=False, hash=False, default=None + ) + pool_provider: "Optional[Callable[[], Awaitable[PoolT]]]" = field(init=False, repr=False, hash=False, default=None) + session_provider: "Optional[Callable[[ConnectionT], AsyncGenerator[DriverT, None]]]" = field( + init=False, repr=False, hash=False, default=None + ) + + def __post_init__(self) -> None: + """Initialize providers and middleware after object creation.""" + if not self.config.supports_connection_pooling and self.pool_key == DEFAULT_POOL_KEY: # type: ignore[union-attr,unused-ignore] + self.pool_key = f"_{self.pool_key}_{id(self.config)}" + + # Validate commit mode + if self.commit_mode not in {"manual", "autocommit", "autocommit_include_redirect"}: + msg = f"Invalid commit mode: {self.commit_mode}" + raise ImproperConfigurationError(detail=msg) + + # Validate status code sets + if ( + self.extra_commit_statuses + and self.extra_rollback_statuses + and self.extra_commit_statuses & self.extra_rollback_statuses + ): + msg = "Extra rollback statuses and commit statuses must not share any status codes" + raise ImproperConfigurationError(msg) + + def init_app(self, app: "Starlette") -> None: + """Initialize SQLSpec configuration for Starlette application. + + Args: + app: The Starlette application instance. + """ + from sqlspec.extensions.starlette._middleware import create_session_middleware + from sqlspec.extensions.starlette._providers import ( + create_connection_provider, + create_pool_provider, + create_session_provider, + ) + + # Create providers + self.pool_provider = create_pool_provider(self.config, self.pool_key) + self.connection_provider = create_connection_provider(self.config, self.pool_key, self.connection_key) + self.session_provider = create_session_provider(self.config, self.connection_key) + + # Add middleware if enabled + if self.enable_middleware: + self.middleware = create_session_middleware( + config=self, + commit_mode=self.commit_mode, + extra_commit_statuses=self.extra_commit_statuses, + extra_rollback_statuses=self.extra_rollback_statuses, + ) + app.add_middleware(self.middleware.__class__, dispatch=self.middleware.dispatch) # type: ignore[attr-defined] + + # Add startup and shutdown events + app.add_event_handler("startup", self._startup_handler(app)) + app.add_event_handler("shutdown", self._shutdown_handler(app)) + + def _startup_handler(self, app: "Starlette") -> "Callable[[], Awaitable[None]]": + """Create startup handler for database pool initialization. + + Args: + app: The Starlette application instance. + + Returns: + Startup handler function. + """ + + async def startup() -> None: + from sqlspec.utils.sync_tools import ensure_async_ + + db_pool = await ensure_async_(self.config.create_pool)() + app.state.__dict__[self.pool_key] = db_pool + + return startup + + def _shutdown_handler(self, app: "Starlette") -> "Callable[[], Awaitable[None]]": + """Create shutdown handler for database pool cleanup. + + Args: + app: The Starlette application instance. + + Returns: + Shutdown handler function. + """ + + async def shutdown() -> None: + import contextlib + + from sqlspec.utils.sync_tools import ensure_async_ + + app.state.__dict__.pop(self.pool_key, None) + with contextlib.suppress(Exception): + await ensure_async_(self.config.close_pool)() + + return shutdown + + def get_request_session(self, request: "Request") -> "Union[SyncDriverAdapterBase, AsyncDriverAdapterBase]": + """Get a database session from request state. + + Args: + request: The Starlette request object. + + Returns: + Database session instance. + + Raises: + RuntimeError: If session is not found in request state. + """ + session = getattr(request.state, self.session_key, None) + if session is None: + msg = f"Database session '{self.session_key}' not found in request state. Ensure middleware is enabled." + raise RuntimeError(msg) + return session + + def get_request_connection(self, request: "Request") -> "ConnectionT": + """Get a database connection from request state. + + Args: + request: The Starlette request object. + + Returns: + Database connection instance. + + Raises: + RuntimeError: If connection is not found in request state. + """ + connection = getattr(request.state, self.connection_key, None) + if connection is None: + msg = ( + f"Database connection '{self.connection_key}' not found in request state. Ensure middleware is enabled." + ) + raise RuntimeError(msg) + return connection + + def get_request_pool(self, request: "Request") -> "PoolT": + """Get a database pool from app state. + + Args: + request: The Starlette request object. + + Returns: + Database pool instance. + + Raises: + RuntimeError: If pool is not found in app state. + """ + pool = getattr(request.app.state, self.pool_key, None) + if pool is None: + msg = f"Database pool '{self.pool_key}' not found in app state. Ensure app is properly initialized." + raise RuntimeError(msg) + return pool + + +# Add passthrough methods to both specialized classes for convenience +class SyncDatabaseConfig(DatabaseConfig): + """Sync-specific DatabaseConfig with better typing for get_request_session.""" + + def get_request_session(self, request: "Request") -> "SyncDriverAdapterBase": + """Get a sync database session from request state. + + Args: + request: The Starlette request object. + + Returns: + Sync database session instance. + + Raises: + RuntimeError: If session is not found in request state. + """ + session = super().get_request_session(request) + return cast("SyncDriverAdapterBase", session) + + def get_request_connection(self, request: "Request") -> "ConnectionT": + """Get a sync database connection from request state. + + Args: + request: The Starlette request object. + + Returns: + Sync database connection instance. + + Raises: + RuntimeError: If connection is not found in request state. + """ + return super().get_request_connection(request) + + def get_request_pool(self, request: "Request") -> "PoolT": + """Get a sync database pool from app state. + + Args: + request: The Starlette request object. + + Returns: + Sync database pool instance. + + Raises: + RuntimeError: If pool is not found in app state. + """ + return super().get_request_pool(request) + + +class AsyncDatabaseConfig(DatabaseConfig): + """Async-specific DatabaseConfig with better typing for get_request_session.""" + + def get_request_session(self, request: "Request") -> "AsyncDriverAdapterBase": + """Get an async database session from request state. + + Args: + request: The Starlette request object. + + Returns: + Async database session instance. + + Raises: + RuntimeError: If session is not found in request state. + """ + session = super().get_request_session(request) + return cast("AsyncDriverAdapterBase", session) + + def get_request_connection(self, request: "Request") -> "ConnectionT": + """Get an async database connection from request state. + + Args: + request: The Starlette request object. + + Returns: + Async database connection instance. + + Raises: + RuntimeError: If connection is not found in request state. + """ + return super().get_request_connection(request) + + def get_request_pool(self, request: "Request") -> "PoolT": + """Get an async database pool from app state. + + Args: + request: The Starlette request object. + + Returns: + Async database pool instance. + + Raises: + RuntimeError: If pool is not found in app state. + """ + return super().get_request_pool(request) diff --git a/sqlspec/extensions/starlette/extension.py b/sqlspec/extensions/starlette/extension.py new file mode 100644 index 000000000..64b76b220 --- /dev/null +++ b/sqlspec/extensions/starlette/extension.py @@ -0,0 +1,434 @@ +"""SQLSpec extension for Starlette applications.""" + +from contextlib import asynccontextmanager +from typing import TYPE_CHECKING, Any, Optional, Union, cast, overload + +from sqlspec.base import SQLSpec as SQLSpecBase +from sqlspec.config import AsyncConfigT, DatabaseConfigProtocol, DriverT, SyncConfigT +from sqlspec.exceptions import ImproperConfigurationError +from sqlspec.extensions.starlette.config import DatabaseConfig +from sqlspec.utils.logging import get_logger + +if TYPE_CHECKING: + from collections.abc import AsyncGenerator + + from starlette.applications import Starlette + from starlette.requests import Request + + from sqlspec.driver import AsyncDriverAdapterBase, SyncDriverAdapterBase + from sqlspec.loader import SQLFileLoader + from sqlspec.typing import ConnectionT, PoolT + +logger = get_logger("extensions.starlette") + +__all__ = ("SQLSpec",) + + +class SQLSpec(SQLSpecBase): + """SQLSpec integration for Starlette applications.""" + + __slots__ = ("_app", "_configs") + + def __init__( + self, + config: Union["SyncConfigT", "AsyncConfigT", "DatabaseConfig", list["DatabaseConfig"]], + *, + loader: "Optional[SQLFileLoader]" = None, + ) -> None: + """Initialize SQLSpec for Starlette. + + Args: + config: Database configuration(s) for SQLSpec. + loader: Optional SQL file loader instance. + """ + super().__init__(loader=loader) + self._app: Union[Starlette, None] = None + + if isinstance(config, DatabaseConfigProtocol): + self._configs: list[DatabaseConfig] = [DatabaseConfig(config=config)] + elif isinstance(config, DatabaseConfig): + self._configs = [config] + else: + self._configs = config + + @property + def config(self) -> list[DatabaseConfig]: + """Return the database configurations. + + Returns: + List of database configurations. + """ + return self._configs + + def init_app(self, app: "Starlette") -> None: + """Initialize SQLSpec with Starlette application. + + Args: + app: The Starlette application instance. + """ + self._app = app + + self._validate_dependency_keys() + + # Store SQLSpec instance in app state for providers + app.state.sqlspec = self + + # Initialize each database configuration + for db_config in self._configs: + # Add the configuration to SQLSpec base + annotation = self.add_config(db_config.config) + db_config.annotation = annotation # type: ignore[attr-defined] + + # Initialize with the app + db_config.init_app(app) + + @overload + def provide_session(self, config: "SyncConfigT") -> "AsyncGenerator[DriverT, None]": ... + + @overload + def provide_session(self, config: "AsyncConfigT") -> "AsyncGenerator[DriverT, None]": ... + + @overload + def provide_session(self, config: "type[Union[SyncConfigT, AsyncConfigT]]") -> "AsyncGenerator[DriverT, None]": ... + + @asynccontextmanager + async def provide_session( + self, config: Union["SyncConfigT", "AsyncConfigT", "type[Union[SyncConfigT, AsyncConfigT]]"] + ) -> "AsyncGenerator[DriverT, None]": + """Provide a database session for the given configuration. + + Args: + config: Configuration instance or type to get session for. + + Yields: + Database driver/session instance. + + Raises: + KeyError: If configuration is not found. + """ + # Find the database config for this configuration + db_config = None + for cfg in self._configs: + if config in (cfg.config, cfg.annotation): # type: ignore[attr-defined] + db_config = cfg + break + + if db_config is None: + msg = f"No configuration found for {config}" + raise KeyError(msg) + + # Get connection and create session + if db_config.connection_provider: + async with db_config.connection_provider() as connection: + if db_config.session_provider: + async with db_config.session_provider(connection) as session: + yield session + else: + # Fallback: create driver directly + yield db_config.config.driver_type(connection=connection) # type: ignore[attr-defined] + else: + # Fallback: use base class session management + async with super().provide_session(config) as session: + yield session + + @overload + def provide_connection(self, config: "SyncConfigT") -> "AsyncGenerator[ConnectionT, None]": ... + + @overload + def provide_connection(self, config: "AsyncConfigT") -> "AsyncGenerator[ConnectionT, None]": ... + + @overload + def provide_connection( + self, config: "type[Union[SyncConfigT, AsyncConfigT]]" + ) -> "AsyncGenerator[ConnectionT, None]": ... + + @asynccontextmanager + async def provide_connection( + self, config: Union["SyncConfigT", "AsyncConfigT", "type[Union[SyncConfigT, AsyncConfigT]]"] + ) -> "AsyncGenerator[ConnectionT, None]": + """Provide a database connection for the given configuration. + + Args: + config: Configuration instance or type to get connection for. + + Yields: + Database connection instance. + + Raises: + KeyError: If configuration is not found. + """ + # Find the database config for this configuration + db_config = None + for cfg in self._configs: + if config in (cfg.config, cfg.annotation): # type: ignore[attr-defined] + db_config = cfg + break + + if db_config is None: + msg = f"No configuration found for {config}" + raise KeyError(msg) + + # Get connection + if db_config.connection_provider: + async with db_config.connection_provider() as connection: + yield connection + else: + # Fallback: use base class connection management + async with super().provide_connection(config) as connection: + yield connection + + @overload + def provide_pool(self, config: "SyncConfigT") -> "PoolT": ... + + @overload + def provide_pool(self, config: "AsyncConfigT") -> "PoolT": ... + + @overload + def provide_pool(self, config: "type[Union[SyncConfigT, AsyncConfigT]]") -> "PoolT": ... + + async def provide_pool( + self, config: Union["SyncConfigT", "AsyncConfigT", "type[Union[SyncConfigT, AsyncConfigT]]"] + ) -> "PoolT": + """Provide a database pool for the given configuration. + + Args: + config: Configuration instance or type to get pool for. + + Returns: + Database connection pool. + + Raises: + KeyError: If configuration is not found. + """ + # Find the database config for this configuration + db_config = None + for cfg in self._configs: + if config in (cfg.config, cfg.annotation): # type: ignore[attr-defined] + db_config = cfg + break + + if db_config is None: + msg = f"No configuration found for {config}" + raise KeyError(msg) + + # Get pool + if db_config.pool_provider: + return await db_config.pool_provider() + # Fallback: create pool directly + from sqlspec.utils.sync_tools import ensure_async_ + + return await ensure_async_(db_config.config.create_pool)() + + def get_annotation( + self, key: Union[str, "SyncConfigT", "AsyncConfigT", "type[Union[SyncConfigT, AsyncConfigT]]"] + ) -> "type[Union[SyncConfigT, AsyncConfigT]]": + """Return the annotation for the given configuration. + + Args: + key: The configuration instance, type, or key to lookup. + + Returns: + The annotation for the configuration. + + Raises: + KeyError: If no configuration is found for the given key. + """ + for cfg in self.config: + if key in (cfg.config, cfg.annotation, cfg.connection_key, cfg.pool_key): + return cfg.annotation # type: ignore[attr-defined] + msg = f"No configuration found for {key}. Available keys: {self._get_available_keys()}" + raise KeyError(msg) + + def provide_request_session( + self, + key: Union[str, "SyncConfigT", "AsyncConfigT", "type[Union[SyncConfigT, AsyncConfigT]]"], + request: "Request", + ) -> "Union[SyncDriverAdapterBase, AsyncDriverAdapterBase]": + """Provide a database session for the specified configuration key from request scope. + + This is a convenience method that combines get_config and get_request_session + into a single call, similar to Advanced Alchemy's provide_session pattern. + + Args: + key: The configuration identifier (same as get_config) + request: The Starlette Request object + + Returns: + A driver session instance for the specified database configuration + + Example: + >>> sqlspec_plugin = request.app.state.sqlspec + >>> # Direct session access by key + >>> auth_session = sqlspec_plugin.provide_request_session( + ... "auth_db", request + ... ) + >>> analytics_session = sqlspec_plugin.provide_request_session( + ... "analytics_db", request + ... ) + """ + # Get DatabaseConfig wrapper for Starlette methods + db_config = self._get_database_config(key) + return db_config.get_request_session(request) + + def provide_sync_request_session( + self, key: Union[str, "SyncConfigT", "type[SyncConfigT]"], request: "Request" + ) -> "SyncDriverAdapterBase": + """Provide a sync database session for the specified configuration key from request scope. + + This method provides better type hints for sync database sessions, ensuring the returned + session is properly typed as SyncDriverAdapterBase for better IDE support and type safety. + + Args: + key: The sync configuration identifier + request: The Starlette Request object + + Returns: + A sync driver session instance for the specified database configuration + """ + # Get DatabaseConfig wrapper for Starlette methods + db_config = self._get_database_config(key) + session = db_config.get_request_session(request) + return cast("SyncDriverAdapterBase", session) + + def provide_async_request_session( + self, key: Union[str, "AsyncConfigT", "type[AsyncConfigT]"], request: "Request" + ) -> "AsyncDriverAdapterBase": + """Provide an async database session for the specified configuration key from request scope. + + This method provides better type hints for async database sessions, ensuring the returned + session is properly typed as AsyncDriverAdapterBase for better IDE support and type safety. + + Args: + key: The async configuration identifier + request: The Starlette Request object + + Returns: + An async driver session instance for the specified database configuration + """ + # Get DatabaseConfig wrapper for Starlette methods + db_config = self._get_database_config(key) + session = db_config.get_request_session(request) + return cast("AsyncDriverAdapterBase", session) + + def provide_request_connection( + self, + key: Union[str, "SyncConfigT", "AsyncConfigT", "type[Union[SyncConfigT, AsyncConfigT]]"], + request: "Request", + ) -> Any: + """Provide a database connection for the specified configuration key from request scope. + + This is a convenience method that combines get_config and get_request_connection + into a single call. + + Args: + key: The configuration identifier (same as get_config) + request: The Starlette Request object + + Returns: + A database connection instance for the specified database configuration + """ + # Get DatabaseConfig wrapper for Starlette methods + db_config = self._get_database_config(key) + return db_config.get_request_connection(request) + + def get_config( + self, name: Union["type[DatabaseConfigProtocol[ConnectionT, PoolT, DriverT]]", str, Any] + ) -> Union["DatabaseConfigProtocol[ConnectionT, PoolT, DriverT]", DatabaseConfig]: + """Get a configuration instance by name, supporting both base behavior and Starlette extensions. + + This method extends the base get_config to support Starlette-specific lookup patterns + while maintaining compatibility with the base class signature. It supports lookup by + connection key, pool key, session key, config instance, or annotation type. + + Args: + name: The configuration identifier - can be: + - Type annotation (base class behavior) + - connection_key (e.g., "auth_db_connection") + - pool_key (e.g., "analytics_db_pool") + - session_key (e.g., "reporting_db_session") + - config instance + - annotation type + + Raises: + KeyError: If no configuration is found for the given name. + + Returns: + The configuration instance for the specified name. + """ + # First try base class behavior for type-based lookup + # Only call super() if name matches the expected base class types + if not isinstance(name, str): + try: + return super().get_config(name) # type: ignore[no-any-return] + except (KeyError, AttributeError): + # Fall back to Starlette-specific lookup patterns + pass + + # Starlette-specific lookups by string keys + if isinstance(name, str): + for c in self.config: + if name in {c.connection_key, c.pool_key, c.session_key}: + return c # Return the DatabaseConfig wrapper for string lookups + + # Lookup by config instance or annotation + for c in self.config: + annotation_match = hasattr(c, "annotation") and name == c.annotation + if name == c.config or annotation_match: + return c.config # Return the underlying config for type-based lookups + + msg = f"No database configuration found for name '{name}'. Available keys: {self._get_available_keys()}" + raise KeyError(msg) + + def _get_database_config( + self, key: Union[str, "SyncConfigT", "AsyncConfigT", "type[Union[SyncConfigT, AsyncConfigT]]"] + ) -> DatabaseConfig: + """Get a DatabaseConfig wrapper instance by name. + + This is used internally by provide_request_session and provide_request_connection + to get the DatabaseConfig wrapper that has the request session methods. + + Args: + key: The configuration identifier + + Returns: + The DatabaseConfig wrapper instance + + Raises: + KeyError: If no configuration is found for the given key + """ + # For string keys, lookup by connection/pool/session keys + if isinstance(key, str): + for c in self.config: + if key in {c.connection_key, c.pool_key, c.session_key}: + return c + + # For other keys, lookup by config instance or annotation + for c in self.config: + annotation_match = hasattr(c, "annotation") and key == c.annotation + if key == c.config or annotation_match: + return c + + msg = f"No database configuration found for name '{key}'. Available keys: {self._get_available_keys()}" + raise KeyError(msg) + + def _get_available_keys(self) -> list[str]: + """Get a list of all available configuration keys for error messages.""" + keys = [] + for c in self.config: + keys.extend([c.connection_key, c.pool_key, c.session_key]) + return keys + + def _validate_dependency_keys(self) -> None: + """Validate that connection and pool keys are unique across configurations. + + Raises: + ImproperConfigurationError: If connection keys or pool keys are not unique. + """ + connection_keys = [c.connection_key for c in self.config] + pool_keys = [c.pool_key for c in self.config] + if len(set(connection_keys)) != len(connection_keys): + msg = "When using multiple database configuration, each configuration must have a unique `connection_key`." + raise ImproperConfigurationError(detail=msg) + if len(set(pool_keys)) != len(pool_keys): + msg = "When using multiple database configuration, each configuration must have a unique `pool_key`." + raise ImproperConfigurationError(detail=msg) diff --git a/sqlspec/utils/portal.py b/sqlspec/utils/portal.py new file mode 100644 index 000000000..7d0704670 --- /dev/null +++ b/sqlspec/utils/portal.py @@ -0,0 +1,198 @@ +"""This module provides a portal provider and portal for calling async functions from synchronous code.""" + +import asyncio +import functools +import queue +import threading +from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, TypeVar, cast +from warnings import warn + +from sqlspec.exceptions import ImproperConfigurationError + +if TYPE_CHECKING: + from collections.abc import Coroutine + +__all__ = ("Portal", "PortalProvider", "PortalProviderSingleton") + +_R = TypeVar("_R") + + +class PortalProviderSingleton(type): + """A singleton metaclass for PortalProvider that creates unique instances per event loop.""" + + _instances: "ClassVar[dict[tuple[type, Optional[asyncio.AbstractEventLoop]], PortalProvider]]" = {} + + def __call__(cls, *args: Any, **kwargs: Any) -> "PortalProvider": + # Use a tuple of the class and loop as the key + key = (cls, kwargs.get("loop")) + if key not in cls._instances: + cls._instances[key] = super().__call__(*args, **kwargs) + return cls._instances[key] + + +class PortalProvider(metaclass=PortalProviderSingleton): + """A provider for creating and managing threaded portals.""" + + def __init__(self, /, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: + """Initialize the PortalProvider.""" + self._request_queue: queue.Queue[ + tuple[ + Callable[..., Coroutine[Any, Any, Any]], + tuple[Any, ...], + dict[str, Any], + queue.Queue[tuple[Optional[Any], Optional[Exception]]], + ] + ] = queue.Queue() + self._result_queue: queue.Queue[tuple[Optional[Any], Optional[Exception]]] = queue.Queue() + self._loop: Optional[asyncio.AbstractEventLoop] = loop + self._thread: Optional[threading.Thread] = None + self._ready_event: threading.Event = threading.Event() + + @property + def portal(self) -> "Portal": + """The portal instance.""" + return Portal(self) + + @property + def is_running(self) -> bool: + """Whether the portal provider is running.""" + return self._thread is not None and self._thread.is_alive() + + @property + def is_ready(self) -> bool: + """Whether the portal provider is ready.""" + return self._ready_event.is_set() + + @property + def loop(self) -> "asyncio.AbstractEventLoop": # pragma: no cover + """The event loop. + + Raises: + ImproperConfigurationError: If the portal provider is not started. + """ + if self._loop is None: + msg = "The PortalProvider is not started. Did you forget to call .start()?" + raise ImproperConfigurationError(msg) + return self._loop + + def start(self) -> None: + """Starts the background thread and event loop.""" + if self._thread is not None: # pragma: no cover + warn("PortalProvider already started", stacklevel=2) + return + self._thread = threading.Thread(target=self._run_event_loop, daemon=True) + self._thread.start() + self._ready_event.wait() # Wait for the loop to be ready + + def stop(self) -> None: + """Stops the background thread and event loop.""" + if self._loop is None or self._thread is None: + return + + self._loop.call_soon_threadsafe(self._loop.stop) + self._thread.join() + self._loop.close() + self._loop = None + self._thread = None + self._ready_event.clear() + + def _run_event_loop(self) -> None: # pragma: no cover + """The main function of the background thread.""" + if self._loop is None: + self._loop = asyncio.new_event_loop() + asyncio.set_event_loop(self._loop) + self._ready_event.set() # Signal that the loop is ready + self._loop.run_forever() + + @staticmethod + async def _async_caller( + func: "Callable[..., Coroutine[Any, Any, _R]]", args: tuple[Any, ...], kwargs: dict[str, Any] + ) -> _R: + """Wrapper to run the async function and send the result to the result queue. + + Args: + func: The async function to call. + args: Positional arguments to the function. + kwargs: Keyword arguments to the function. + + Returns: + The result of the async function. + """ + result: _R = await func(*args, **kwargs) + return result + + def call(self, func: "Callable[..., Coroutine[Any, Any, _R]]", *args: Any, **kwargs: Any) -> _R: + """Calls an async function from a synchronous context. + + Args: + func: The async function to call. + *args: Positional arguments to the function. + **kwargs: Keyword arguments to the function. + + Raises: + ImproperConfigurationError: If the portal provider is not started. + + Returns: + The result of the async function. + """ + if self._loop is None: + msg = "The PortalProvider is not started. Did you forget to call .start()?" + raise ImproperConfigurationError(msg) + + # Create a new result queue + local_result_queue: queue.Queue[tuple[Optional[_R], Optional[Exception]]] = queue.Queue() + + # Send the request to the background thread + self._request_queue.put((func, args, kwargs, local_result_queue)) + + # Trigger the execution in the event loop + _handle = self._loop.call_soon_threadsafe(self._process_request) + + # Wait for the result from the background thread + result, exception = local_result_queue.get() + + if exception: + raise exception + return cast("_R", result) + + def _process_request(self) -> None: # pragma: no cover + """Processes a request from the request queue in the event loop.""" + assert self._loop is not None + + if not self._request_queue.empty(): + func, args, kwargs, local_result_queue = self._request_queue.get() + future = asyncio.run_coroutine_threadsafe(self._async_caller(func, args, kwargs), self._loop) + + # Attach a callback to handle the result/exception + future.add_done_callback( + functools.partial(self._handle_future_result, local_result_queue=local_result_queue) # pyright: ignore[reportArgumentType] + ) + + @staticmethod + def _handle_future_result( + future: "asyncio.Future[Any]", local_result_queue: "queue.Queue[tuple[Optional[Any], Optional[Exception]]]" + ) -> None: # pragma: no cover + """Handles the result or exception from the completed future.""" + try: + result = future.result() + local_result_queue.put((result, None)) + except Exception as e: + local_result_queue.put((None, e)) + + +class Portal: + def __init__(self, provider: "PortalProvider") -> None: + self._provider = provider + + def call(self, func: "Callable[..., Coroutine[Any, Any, _R]]", *args: Any, **kwargs: Any) -> _R: + """Calls an async function using the associated PortalProvider. + + Args: + func: The async function to call. + *args: Positional arguments to the function. + **kwargs: Keyword arguments to the function. + + Returns: + The result of the async function. + """ + return self._provider.call(func, *args, **kwargs) diff --git a/tests/integration/test_extensions/test_fastapi/__init__.py b/tests/integration/test_extensions/test_fastapi/__init__.py new file mode 100644 index 000000000..a46a3cfa2 --- /dev/null +++ b/tests/integration/test_extensions/test_fastapi/__init__.py @@ -0,0 +1 @@ +# FastAPI extension tests diff --git a/tests/integration/test_extensions/test_fastapi/test_extension.py b/tests/integration/test_extensions/test_fastapi/test_extension.py new file mode 100644 index 000000000..29ef9379d --- /dev/null +++ b/tests/integration/test_extensions/test_fastapi/test_extension.py @@ -0,0 +1,143 @@ +"""Tests for SQLSpec FastAPI extension.""" + +import pytest +from fastapi import FastAPI +from fastapi.testclient import TestClient + +from sqlspec.adapters.sqlite import SqliteConfig +from sqlspec.extensions.fastapi import DatabaseConfig, SQLSpec + + +@pytest.fixture +def sqlite_config() -> SqliteConfig: + """Create an in-memory SQLite configuration for testing.""" + return SqliteConfig(database=":memory:") + + +@pytest.fixture +def database_config(sqlite_config: SqliteConfig) -> DatabaseConfig: + """Create a database configuration for testing.""" + return DatabaseConfig(config=sqlite_config) + + +@pytest.fixture +def sqlspec_extension(database_config: DatabaseConfig) -> SQLSpec: + """Create a SQLSpec extension for testing.""" + return SQLSpec(config=database_config) + + +@pytest.fixture +def fastapi_app(sqlspec_extension: SQLSpec, database_config: DatabaseConfig) -> FastAPI: + """Create a FastAPI application with SQLSpec configured.""" + app = FastAPI() + + # Initialize SQLSpec with the app + sqlspec_extension.init_app(app) + + @app.get("/test") + async def test_endpoint(): + """Test endpoint that uses the database connection.""" + # Check if connection is available in app state + has_pool = hasattr(app.state, database_config.pool_key) + return {"has_pool": has_pool} + + return app + + +def test_sqlspec_fastapi_initialization(fastapi_app: FastAPI, sqlspec_extension: SQLSpec): + """Test that SQLSpec initializes properly with FastAPI.""" + assert sqlspec_extension._app == fastapi_app + assert len(sqlspec_extension.config) == 1 + + +def test_sqlspec_fastapi_configuration(database_config: DatabaseConfig): + """Test database configuration properties.""" + assert database_config.connection_key == "db_connection" + assert database_config.pool_key == "db_pool" + assert database_config.session_key == "db_session" + assert database_config.commit_mode == "manual" + + +def test_fastapi_app_with_middleware(fastapi_app: FastAPI): + """Test that the FastAPI app works with SQLSpec middleware.""" + client = TestClient(fastapi_app) + + # Make a request to test the middleware + response = client.get("/test") + assert response.status_code == 200 + + # The response should indicate the pool is available + data = response.json() + assert "has_pool" in data + + +def test_provide_session_context_manager(sqlspec_extension: SQLSpec, sqlite_config: SqliteConfig): + """Test the provide_session context manager.""" + + async def test_session(): + async with sqlspec_extension.provide_session(sqlite_config) as session: + assert session is not None + # Test that we can execute a simple query + result = await session.execute("SELECT 1 as test") + assert result is not None + + import asyncio + + asyncio.run(test_session()) + + +def test_provide_connection_context_manager(sqlspec_extension: SQLSpec, sqlite_config: SqliteConfig): + """Test the provide_connection context manager.""" + + async def test_connection(): + async with sqlspec_extension.provide_connection(sqlite_config) as connection: + assert connection is not None + + import asyncio + + asyncio.run(test_connection()) + + +def test_multiple_database_configs(): + """Test SQLSpec with multiple database configurations.""" + config1 = DatabaseConfig( + config=SqliteConfig(database=":memory:"), connection_key="db1_connection", pool_key="db1_pool" + ) + config2 = DatabaseConfig( + config=SqliteConfig(database=":memory:"), connection_key="db2_connection", pool_key="db2_pool" + ) + + sqlspec = SQLSpec(config=[config1, config2]) + app = FastAPI() + sqlspec.init_app(app) + + assert len(sqlspec.config) == 2 + assert sqlspec.config[0].connection_key == "db1_connection" + assert sqlspec.config[1].connection_key == "db2_connection" + + +def test_database_config_validation(): + """Test database configuration validation.""" + config = SqliteConfig(database=":memory:") + + # Test invalid commit mode + with pytest.raises(Exception): # Should raise ImproperConfigurationError + DatabaseConfig(config=config, commit_mode="invalid") # type: ignore[arg-type] + + # Test conflicting status codes + with pytest.raises(Exception): # Should raise ImproperConfigurationError + DatabaseConfig(config=config, extra_commit_statuses={200, 201}, extra_rollback_statuses={200, 500}) + + +def test_cli_integration(): + """Test CLI integration functions.""" + from sqlspec.extensions.fastapi.cli import register_database_commands + + app = FastAPI() + + # Register database commands + db_group = register_database_commands(app) + assert db_group.name == "db" + + # Check that commands were added + assert len(db_group.commands) > 0 diff --git a/tests/integration/test_extensions/test_flask/__init__.py b/tests/integration/test_extensions/test_flask/__init__.py new file mode 100644 index 000000000..d1abfaec1 --- /dev/null +++ b/tests/integration/test_extensions/test_flask/__init__.py @@ -0,0 +1 @@ +# Flask extension tests diff --git a/tests/integration/test_extensions/test_flask/test_extension.py b/tests/integration/test_extensions/test_flask/test_extension.py new file mode 100644 index 000000000..1fc8b901d --- /dev/null +++ b/tests/integration/test_extensions/test_flask/test_extension.py @@ -0,0 +1,164 @@ +"""Tests for SQLSpec Flask extension.""" + +import pytest + +from sqlspec.adapters.sqlite import SqliteConfig, SqliteConnectionParams +from sqlspec.extensions.flask import DatabaseConfig, FlaskServiceMixin, SQLSpec + + +@pytest.fixture +def sqlite_config() -> SqliteConfig: + """Create an in-memory SQLite configuration for testing.""" + return SqliteConfig(pool_config=SqliteConnectionParams(database=":memory:")) + + +@pytest.fixture +def database_config(sqlite_config: SqliteConfig) -> DatabaseConfig: + """Create a database configuration for testing.""" + return DatabaseConfig(config=sqlite_config) + + +@pytest.fixture +def sqlspec_extension(database_config: DatabaseConfig) -> SQLSpec: + """Create a SQLSpec extension for testing.""" + return SQLSpec(config=database_config) + + +@pytest.fixture +def flask_app(sqlspec_extension: SQLSpec, database_config: DatabaseConfig): + """Create a Flask application with SQLSpec configured.""" + try: + from flask import Flask + except ImportError: + pytest.skip("Flask not available") + + app = Flask(__name__) + + # Initialize SQLSpec with the app + sqlspec_extension.init_app(app) + + @app.route("/test") + def test_endpoint(): + """Test endpoint that uses the database session.""" + session = sqlspec_extension.get_session() + return {"has_session": session is not None} + + return app + + +def test_sqlspec_flask_initialization(flask_app, sqlspec_extension: SQLSpec): + """Test that SQLSpec initializes properly with Flask.""" + assert sqlspec_extension._app == flask_app + assert len(sqlspec_extension.config) == 1 + + +def test_sqlspec_flask_configuration(database_config: DatabaseConfig): + """Test database configuration properties.""" + assert database_config.connection_key == "db_connection" + assert database_config.pool_key == "db_pool" + assert database_config.session_key == "db_session" + assert database_config.commit_mode == "manual" + + +def test_flask_app_with_extension(flask_app): + """Test that the Flask app works with SQLSpec extension.""" + with flask_app.test_client() as client: + # Test that the app starts without errors + response = client.get("/test") + assert response.status_code == 200 + + # The response should indicate session is available + data = response.get_json() + assert "has_session" in data + + +def test_provide_session_context_manager(sqlspec_extension: SQLSpec, sqlite_config: SqliteConfig): + """Test the provide_session context manager.""" + + async def test_session(): + async with sqlspec_extension.provide_session(sqlite_config) as session: + assert session is not None + # Test that we can execute a simple query + result = session.execute("SELECT 1 as test") + assert result is not None + + import asyncio + + asyncio.run(test_session()) + + +def test_multiple_database_configs(): + """Test SQLSpec with multiple database configurations.""" + config1 = DatabaseConfig( + config=SqliteConfig(pool_config=SqliteConnectionParams(database=":memory:")), + connection_key="db1_connection", + pool_key="db1_pool", + ) + config2 = DatabaseConfig( + config=SqliteConfig(pool_config=SqliteConnectionParams(database=":memory:")), + connection_key="db2_connection", + pool_key="db2_pool", + ) + + sqlspec = SQLSpec(config=[config1, config2]) + + try: + from flask import Flask + + app = Flask(__name__) + sqlspec.init_app(app) + + assert len(sqlspec.config) == 2 + assert sqlspec.config[0].connection_key == "db1_connection" + assert sqlspec.config[1].connection_key == "db2_connection" + except ImportError: + pytest.skip("Flask not available") + + +def test_database_config_validation(): + """Test database configuration validation.""" + config = SqliteConfig(pool_config=SqliteConnectionParams(database=":memory:")) + + # Test invalid commit mode + with pytest.raises(Exception): # Should raise ImproperConfigurationError + DatabaseConfig(config=config, commit_mode="invalid") # type: ignore[arg-type] + + # Test conflicting status codes + with pytest.raises(Exception): # Should raise ImproperConfigurationError + DatabaseConfig(config=config, extra_commit_statuses={200, 201}, extra_rollback_statuses={200, 500}) + + +def test_flask_service_mixin(): + """Test FlaskServiceMixin functionality.""" + + class TestService(FlaskServiceMixin): + def get_data(self): + return {"message": "test"} + + service = TestService() + + # Test without Flask context (should not crash) + assert hasattr(service, "jsonify") + + try: + from flask import Flask + + app = Flask(__name__) + with app.app_context(): + # Test jsonify method + response = service.jsonify({"test": "data"}) + assert response.status_code == 200 + except ImportError: + pytest.skip("Flask not available") + + +def test_cli_integration(): + """Test CLI integration functions.""" + try: + from sqlspec.extensions.flask.cli import database_group + + assert database_group.name == "db" + # Check that commands were added + assert len(database_group.commands) > 0 + except ImportError: + pytest.skip("Flask not available") diff --git a/tests/integration/test_extensions/test_sanic/__init__.py b/tests/integration/test_extensions/test_sanic/__init__.py new file mode 100644 index 000000000..f18b7680c --- /dev/null +++ b/tests/integration/test_extensions/test_sanic/__init__.py @@ -0,0 +1 @@ +# Sanic extension tests diff --git a/tests/integration/test_extensions/test_sanic/test_extension.py b/tests/integration/test_extensions/test_sanic/test_extension.py new file mode 100644 index 000000000..699d7b03e --- /dev/null +++ b/tests/integration/test_extensions/test_sanic/test_extension.py @@ -0,0 +1,168 @@ +"""Tests for SQLSpec Sanic extension.""" + +import pytest + +from sqlspec.adapters.sqlite import SqliteConfig, SqliteConnectionParams +from sqlspec.extensions.sanic import DatabaseConfig, SQLSpec + + +@pytest.fixture +def sqlite_config() -> SqliteConfig: + """Create an in-memory SQLite configuration for testing.""" + return SqliteConfig(pool_config=SqliteConnectionParams(database=":memory:")) + + +@pytest.fixture +def database_config(sqlite_config: SqliteConfig) -> DatabaseConfig: + """Create a database configuration for testing.""" + return DatabaseConfig(config=sqlite_config) + + +@pytest.fixture +def sqlspec_extension(database_config: DatabaseConfig) -> SQLSpec: + """Create a SQLSpec extension for testing.""" + return SQLSpec(config=database_config) + + +@pytest.fixture +def sanic_app(sqlspec_extension: SQLSpec, database_config: DatabaseConfig): + """Create a Sanic application with SQLSpec configured.""" + try: + import uuid + + from sanic import Sanic + from sanic.response import json + except ImportError: + pytest.skip("Sanic not available") + + app = Sanic(f"test_app_{uuid.uuid4().hex[:8]}") + + # Initialize SQLSpec with the app + sqlspec_extension.init_app(app) + + @app.route("/test") + async def test_endpoint(request): + """Test endpoint that uses the database session.""" + try: + session = sqlspec_extension.get_session(request) + has_session = session is not None + except Exception: + has_session = False + return json({"has_session": has_session}) + + return app + + +def test_sqlspec_sanic_initialization(sanic_app, sqlspec_extension: SQLSpec): + """Test that SQLSpec initializes properly with Sanic.""" + assert sqlspec_extension._app == sanic_app + assert len(sqlspec_extension.config) == 1 + + +def test_sqlspec_sanic_configuration(database_config: DatabaseConfig): + """Test database configuration properties.""" + assert database_config.connection_key == "db_connection" + assert database_config.pool_key == "db_pool" + assert database_config.session_key == "db_session" + assert database_config.commit_mode == "manual" + + +def test_sanic_app_with_extension(sanic_app): + """Test that the Sanic app works with SQLSpec extension.""" + try: + from sanic_testing import TestClient + except ImportError: + pytest.skip("Sanic testing not available") + + client = TestClient(sanic_app) + + # Test that the app starts without errors + request, response = client.get("/test") + assert response.status == 200 + + # The response should indicate session handling + data = response.json + assert "has_session" in data + + +def test_provide_session_context_manager(sqlspec_extension: SQLSpec, sqlite_config: SqliteConfig): + """Test the provide_session context manager.""" + + async def test_session(): + async with sqlspec_extension.provide_session(sqlite_config) as session: + assert session is not None + # Test that we can execute a simple query + result = session.execute("SELECT 1 as test") + assert result is not None + + import asyncio + + asyncio.run(test_session()) + + +def test_provide_connection_context_manager(sqlspec_extension: SQLSpec, sqlite_config: SqliteConfig): + """Test the provide_connection context manager.""" + + async def test_connection(): + async with sqlspec_extension.provide_connection(sqlite_config) as connection: + assert connection is not None + + import asyncio + + asyncio.run(test_connection()) + + +def test_multiple_database_configs(): + """Test SQLSpec with multiple database configurations.""" + config1 = DatabaseConfig( + config=SqliteConfig(pool_config=SqliteConnectionParams(database=":memory:")), + connection_key="db1_connection", + pool_key="db1_pool", + ) + config2 = DatabaseConfig( + config=SqliteConfig(pool_config=SqliteConnectionParams(database=":memory:")), + connection_key="db2_connection", + pool_key="db2_pool", + ) + + sqlspec = SQLSpec(config=[config1, config2]) + + try: + import uuid + + from sanic import Sanic + + app = Sanic(f"test_app_{uuid.uuid4().hex[:8]}") + sqlspec.init_app(app) + + assert len(sqlspec.config) == 2 + assert sqlspec.config[0].connection_key == "db1_connection" + assert sqlspec.config[1].connection_key == "db2_connection" + except ImportError: + pytest.skip("Sanic not available") + + +def test_database_config_validation(): + """Test database configuration validation.""" + config = SqliteConfig(pool_config=SqliteConnectionParams(database=":memory:")) + + # Test invalid commit mode + with pytest.raises(Exception): # Should raise ImproperConfigurationError + DatabaseConfig(config=config, commit_mode="invalid") # type: ignore[arg-type] + + # Test conflicting status codes + with pytest.raises(Exception): # Should raise ImproperConfigurationError + DatabaseConfig(config=config, extra_commit_statuses={200, 201}, extra_rollback_statuses={200, 500}) + + +def test_unique_context_keys(database_config: DatabaseConfig): + """Test that unique context keys are generated.""" + assert database_config.engine_key.startswith("engine_") + assert database_config.session_maker_key.startswith("session_maker_") + + # Test different configs get different keys + config2 = DatabaseConfig( + config=SqliteConfig(pool_config=SqliteConnectionParams(database=":memory:")), connection_key="db2_connection" + ) + assert database_config.engine_key != config2.engine_key + assert database_config.session_maker_key != config2.session_maker_key diff --git a/tests/integration/test_extensions/test_starlette/__init__.py b/tests/integration/test_extensions/test_starlette/__init__.py new file mode 100644 index 000000000..c4ea75848 --- /dev/null +++ b/tests/integration/test_extensions/test_starlette/__init__.py @@ -0,0 +1 @@ +# Starlette extension tests diff --git a/tests/integration/test_extensions/test_starlette/test_extension.py b/tests/integration/test_extensions/test_starlette/test_extension.py new file mode 100644 index 000000000..71fb894f9 --- /dev/null +++ b/tests/integration/test_extensions/test_starlette/test_extension.py @@ -0,0 +1,133 @@ +"""Tests for SQLSpec Starlette extension.""" + +import pytest +from starlette.applications import Starlette +from starlette.responses import JSONResponse +from starlette.testclient import TestClient + +from sqlspec.adapters.sqlite import SqliteConfig +from sqlspec.extensions.starlette import DatabaseConfig, SQLSpec + + +@pytest.fixture +def sqlite_config() -> SqliteConfig: + """Create an in-memory SQLite configuration for testing.""" + return SqliteConfig(database=":memory:") + + +@pytest.fixture +def database_config(sqlite_config: SqliteConfig) -> DatabaseConfig: + """Create a database configuration for testing.""" + return DatabaseConfig(config=sqlite_config) + + +@pytest.fixture +def sqlspec_extension(database_config: DatabaseConfig) -> SQLSpec: + """Create a SQLSpec extension for testing.""" + return SQLSpec(config=database_config) + + +@pytest.fixture +def starlette_app(sqlspec_extension: SQLSpec, database_config: DatabaseConfig) -> Starlette: + """Create a Starlette application with SQLSpec configured.""" + app = Starlette() + + # Initialize SQLSpec with the app + sqlspec_extension.init_app(app) + + @app.route("/test") + async def test_endpoint(request): + """Test endpoint that uses the database connection.""" + # Check if connection is available in request state + connection_key = database_config.connection_key + has_connection = hasattr(request.state, connection_key) + return JSONResponse({"has_connection": has_connection}) + + return app + + +def test_sqlspec_starlette_initialization(starlette_app: Starlette, sqlspec_extension: SQLSpec): + """Test that SQLSpec initializes properly with Starlette.""" + assert sqlspec_extension._app == starlette_app + assert len(sqlspec_extension.config) == 1 + + +def test_sqlspec_starlette_configuration(database_config: DatabaseConfig): + """Test database configuration properties.""" + assert database_config.connection_key == "db_connection" + assert database_config.pool_key == "db_pool" + assert database_config.session_key == "db_session" + assert database_config.commit_mode == "manual" + + +def test_starlette_app_with_middleware(starlette_app: Starlette): + """Test that the Starlette app works with SQLSpec middleware.""" + client = TestClient(starlette_app) + + # Make a request to test the middleware + response = client.get("/test") + assert response.status_code == 200 + + # The response should indicate middleware is working + data = response.json() + # Note: This might be False if middleware isn't properly setting up connections + # but the test should pass without errors + assert "has_connection" in data + + +def test_provide_session_context_manager(sqlspec_extension: SQLSpec, sqlite_config: SqliteConfig): + """Test the provide_session context manager.""" + + async def test_session(): + async with sqlspec_extension.provide_session(sqlite_config) as session: + assert session is not None + # Test that we can execute a simple query + result = await session.execute("SELECT 1 as test") + assert result is not None + + import asyncio + + asyncio.run(test_session()) + + +def test_provide_connection_context_manager(sqlspec_extension: SQLSpec, sqlite_config: SqliteConfig): + """Test the provide_connection context manager.""" + + async def test_connection(): + async with sqlspec_extension.provide_connection(sqlite_config) as connection: + assert connection is not None + + import asyncio + + asyncio.run(test_connection()) + + +def test_multiple_database_configs(): + """Test SQLSpec with multiple database configurations.""" + config1 = DatabaseConfig( + config=SqliteConfig(database=":memory:"), connection_key="db1_connection", pool_key="db1_pool" + ) + config2 = DatabaseConfig( + config=SqliteConfig(database=":memory:"), connection_key="db2_connection", pool_key="db2_pool" + ) + + sqlspec = SQLSpec(config=[config1, config2]) + app = Starlette() + sqlspec.init_app(app) + + assert len(sqlspec.config) == 2 + assert sqlspec.config[0].connection_key == "db1_connection" + assert sqlspec.config[1].connection_key == "db2_connection" + + +def test_database_config_validation(): + """Test database configuration validation.""" + config = SqliteConfig(database=":memory:") + + # Test invalid commit mode + with pytest.raises(Exception): # Should raise ImproperConfigurationError + DatabaseConfig(config=config, commit_mode="invalid") # type: ignore[arg-type] + + # Test conflicting status codes + with pytest.raises(Exception): # Should raise ImproperConfigurationError + DatabaseConfig(config=config, extra_commit_statuses={200, 201}, extra_rollback_statuses={200, 500}) diff --git a/tests/unit/test_extensions/test_flask/test_providers.py b/tests/unit/test_extensions/test_flask/test_providers.py new file mode 100644 index 000000000..27ae51e2e --- /dev/null +++ b/tests/unit/test_extensions/test_flask/test_providers.py @@ -0,0 +1,216 @@ +"""Tests for SQLSpec Flask provider functions.""" + +from unittest.mock import AsyncMock, Mock, patch + +import pytest + +from sqlspec.adapters.sqlite import SqliteConfig, SqliteConnectionParams +from sqlspec.extensions.flask._providers import ( + create_connection_provider, + create_pool_provider, + create_session_provider, +) + +# Test fixtures + + +@pytest.fixture +def mock_config() -> Mock: + """Create a mock database configuration.""" + config = Mock() + config.create_pool = AsyncMock(return_value="mock_pool") + config.close_pool = AsyncMock() + config.provide_connection = Mock() + config.driver_type = Mock() + return config + + +@pytest.fixture +def sqlite_config() -> SqliteConfig: + """Create a real SQLite configuration for testing.""" + return SqliteConfig(pool_config=SqliteConnectionParams(database=":memory:")) + + +# Basic provider creation tests + + +def test_create_pool_provider(mock_config: Mock) -> None: + """Test pool provider creation returns sync function for Flask.""" + pool_provider = create_pool_provider(mock_config, "test_pool_key") + + assert callable(pool_provider) + + # For Flask, this should return a sync function that uses portal + # Test the provider function returns expected result + result = pool_provider() + # The result should be whatever the portal.call returns + assert result is not None + + +def test_create_connection_provider(mock_config: Mock) -> None: + """Test connection provider creation returns sync function for Flask.""" + # Setup mock connection context manager + mock_connection = Mock() + mock_cm = AsyncMock() + mock_cm.__aenter__ = AsyncMock(return_value=mock_connection) + mock_cm.__aexit__ = AsyncMock() + mock_config.provide_connection.return_value = mock_cm + + connection_provider = create_connection_provider(mock_config, "test_pool_key", "test_connection_key") + + assert callable(connection_provider) + + # For Flask, this should return a sync function that uses portal + result = connection_provider() + assert result is not None + + +def test_create_session_provider(mock_config: Mock) -> None: + """Test session provider creation returns sync function for Flask.""" + mock_connection = Mock() + mock_driver = Mock() + mock_config.driver_type.return_value = mock_driver + + session_provider = create_session_provider(mock_config, "test_connection_key") + + assert callable(session_provider) + + # For Flask, this should return a sync function that uses portal + result = session_provider(mock_connection) + assert result is not None + + +# Portal integration tests + + +@patch("sqlspec.extensions.flask._providers.PortalProvider") +def test_pool_provider_uses_portal(mock_portal_class: Mock, mock_config: Mock) -> None: + """Test that pool provider uses portal for sync operation.""" + # Setup mock portal + mock_portal = Mock() + mock_portal.is_running = False + mock_portal.start = Mock() + mock_portal.call = Mock(return_value="mock_pool") + mock_portal_class.return_value = mock_portal + + # Create the provider + pool_provider = create_pool_provider(mock_config, "test_pool") + + # Call the provider (which should be the sync wrapper) + result = pool_provider() + + assert result == "mock_pool" + mock_portal_class.assert_called_once() + mock_portal.start.assert_called_once() + mock_portal.call.assert_called_once() + + +@patch("sqlspec.extensions.flask._providers.PortalProvider") +def test_connection_provider_uses_portal(mock_portal_class: Mock, mock_config: Mock) -> None: + """Test that connection provider uses portal for sync operation.""" + # Setup mock portal + mock_portal = Mock() + mock_portal.is_running = True # Already running + mock_portal.start = Mock() + mock_portal.call = Mock(return_value="mock_connection") + mock_portal_class.return_value = mock_portal + + # Create the provider + connection_provider = create_connection_provider(mock_config, "pool", "conn") + + # Call the provider + result = connection_provider() + + assert result == "mock_connection" + mock_portal_class.assert_called_once() + mock_portal.start.assert_not_called() # Already running + mock_portal.call.assert_called_once() + + +@patch("sqlspec.extensions.flask._providers.PortalProvider") +def test_session_provider_uses_portal(mock_portal_class: Mock, mock_config: Mock) -> None: + """Test that session provider uses portal for sync operation.""" + # Setup mock portal + mock_portal = Mock() + mock_portal.is_running = False + mock_portal.start = Mock() + mock_portal.call = Mock(return_value="mock_session") + mock_portal_class.return_value = mock_portal + + mock_connection = Mock() + + # Create the provider + session_provider = create_session_provider(mock_config, "conn") + + # Call the provider + result = session_provider(mock_connection) + + assert result == "mock_session" + mock_portal_class.assert_called_once() + mock_portal.start.assert_called_once() + mock_portal.call.assert_called_once() + + +# Error handling tests + + +@patch("sqlspec.extensions.flask._providers.PortalProvider") +def test_provider_error_handling(mock_portal_class: Mock, mock_config: Mock) -> None: + """Test provider error handling.""" + # Setup portal to raise exception + mock_portal = Mock() + mock_portal.is_running = False + mock_portal.start = Mock() + mock_portal.call = Mock(side_effect=Exception("Portal error")) + mock_portal_class.return_value = mock_portal + + pool_provider = create_pool_provider(mock_config, "test_pool") + + with pytest.raises(Exception, match="Portal error"): + pool_provider() + + +# Type annotation tests + + +def test_pool_provider_type_hints() -> None: + """Test that pool provider has correct type hints.""" + import inspect + + # Get the function signature + sig = inspect.signature(create_pool_provider) + + # Check parameter types + assert "config" in sig.parameters + assert "pool_key" in sig.parameters + + # The return type should be a callable + provider = create_pool_provider(Mock(), "test") + assert callable(provider) + + +def test_connection_provider_type_hints() -> None: + """Test that connection provider has correct type hints.""" + import inspect + + sig = inspect.signature(create_connection_provider) + + assert "config" in sig.parameters + assert "pool_key" in sig.parameters + assert "connection_key" in sig.parameters + + provider = create_connection_provider(Mock(), "pool", "conn") + assert callable(provider) + + +def test_session_provider_type_hints() -> None: + """Test that session provider has correct type hints.""" + import inspect + + sig = inspect.signature(create_session_provider) + + assert "config" in sig.parameters + assert "connection_key" in sig.parameters + + provider = create_session_provider(Mock(), "conn") + assert callable(provider) diff --git a/tests/unit/test_extensions/test_sanic/__init__.py b/tests/unit/test_extensions/test_sanic/__init__.py new file mode 100644 index 000000000..c557e3577 --- /dev/null +++ b/tests/unit/test_extensions/test_sanic/__init__.py @@ -0,0 +1 @@ +"""Unit tests for SQLSpec Sanic extension.""" diff --git a/tests/unit/test_extensions/test_sanic/test_middleware.py b/tests/unit/test_extensions/test_sanic/test_middleware.py new file mode 100644 index 000000000..ab397bd9d --- /dev/null +++ b/tests/unit/test_extensions/test_sanic/test_middleware.py @@ -0,0 +1,293 @@ +"""Unit tests for Sanic middleware.""" + +from unittest.mock import AsyncMock, Mock, patch + +import pytest + +from sqlspec.adapters.sqlite import SqliteConfig, SqliteConnectionParams +from sqlspec.extensions.sanic._middleware import SessionMiddleware +from sqlspec.extensions.sanic.config import DatabaseConfig + + +@pytest.fixture +def sqlite_config(): + """Create a SQLite configuration for testing.""" + return SqliteConfig(pool_config=SqliteConnectionParams(database=":memory:")) + + +@pytest.fixture +def database_config(sqlite_config): + """Create a database configuration for testing.""" + return DatabaseConfig(config=sqlite_config) + + +@pytest.fixture +def mock_request(): + """Create a mock Sanic request object.""" + request = Mock() + request.id = "test-request-123" + request.ctx = Mock() + return request + + +@pytest.fixture +def mock_response(): + """Create a mock Sanic response object.""" + response = Mock() + response.status = 200 + return response + + +@pytest.fixture +def session_middleware(database_config): + """Create a SessionMiddleware instance for testing.""" + return SessionMiddleware(database_config) + + +class TestSessionMiddlewareInit: + """Test SessionMiddleware initialization.""" + + def test_middleware_initialization(self, database_config): + """Test that middleware initializes with database config.""" + middleware = SessionMiddleware(database_config) + + assert middleware.database_config is database_config + assert middleware._connection_key == database_config.connection_key + assert middleware._session_key == database_config.session_key + + +class TestBeforeRequest: + """Test before_request middleware functionality.""" + + async def test_before_request_skips_existing_connection(self, session_middleware, mock_request): + """Test that before_request skips setup if connection already exists.""" + # Set up existing connection + setattr(mock_request.ctx, session_middleware._connection_key, Mock()) + + await session_middleware.before_request(mock_request) + + # Should not have created additional connections + # This test verifies the early return path + + @patch("sqlspec.extensions.sanic._middleware.ensure_async_") + async def test_before_request_creates_connection_with_provider(self, mock_ensure_async, session_middleware, mock_request): + """Test that before_request creates connection using provider.""" + # Set up mocks + mock_connection = Mock() + mock_connection_gen = AsyncMock() + mock_connection_gen.__anext__ = AsyncMock(return_value=mock_connection) + + session_middleware.database_config.connection_provider = AsyncMock(return_value=mock_connection_gen) + + await session_middleware.before_request(mock_request) + + # Verify connection was set + assert getattr(mock_request.ctx, session_middleware._connection_key) is mock_connection + + # Verify generator was stored for cleanup + assert hasattr(mock_request.ctx, f"_{session_middleware._connection_key}_gen") + + @patch("sqlspec.extensions.sanic._middleware.ensure_async_") + async def test_before_request_creates_connection_fallback(self, mock_ensure_async, session_middleware, mock_request): + """Test that before_request creates connection using fallback method.""" + # Set up mocks for fallback path + session_middleware.database_config.connection_provider = None + + mock_pool = Mock() + mock_ensure_async.return_value = AsyncMock(return_value=mock_pool) + + mock_connection_cm = Mock() + mock_connection_cm.__aenter__ = AsyncMock(return_value=Mock()) + session_middleware.database_config.config.provide_connection = Mock(return_value=mock_connection_cm) + session_middleware.database_config.config.create_pool = Mock() + + await session_middleware.before_request(mock_request) + + # Verify pool creation was called + mock_ensure_async.assert_called_once() + + async def test_before_request_creates_session_with_provider(self, session_middleware, mock_request): + """Test that before_request creates session using session provider.""" + # Set up existing connection + mock_connection = Mock() + setattr(mock_request.ctx, session_middleware._connection_key, mock_connection) + + # Set up session provider + mock_session = Mock() + mock_session_gen = AsyncMock() + mock_session_gen.__anext__ = AsyncMock(return_value=mock_session) + + session_middleware.database_config.session_provider = AsyncMock(return_value=mock_session_gen) + + await session_middleware.before_request(mock_request) + + # Verify session was created + assert getattr(mock_request.ctx, session_middleware._session_key) is mock_session + + async def test_before_request_handles_exceptions(self, session_middleware, mock_request, caplog): + """Test that before_request handles exceptions properly.""" + # Set up connection provider to raise exception + session_middleware.database_config.connection_provider = AsyncMock(side_effect=Exception("Test error")) + + with pytest.raises(Exception, match="Test error"): + await session_middleware.before_request(mock_request) + + +class TestAfterResponse: + """Test after_response middleware functionality.""" + + async def test_after_response_cleanup_session_generator(self, session_middleware, mock_request, mock_response): + """Test that after_response cleans up session generator.""" + # Set up session generator + mock_session_gen = AsyncMock() + mock_session_gen.__anext__ = AsyncMock(side_effect=StopAsyncIteration) + setattr(mock_request.ctx, f"_{session_middleware._session_key}_gen", mock_session_gen) + setattr(mock_request.ctx, session_middleware._session_key, Mock()) + + await session_middleware.after_response(mock_request, mock_response) + + # Verify session generator cleanup was attempted + mock_session_gen.__anext__.assert_called_once() + + async def test_after_response_transaction_commit_on_success(self, session_middleware, mock_request, mock_response): + """Test that after_response commits transaction on successful response.""" + # Set up successful response + mock_response.status = 200 + session_middleware.database_config.commit_mode = "autocommit" + + # Set up connection with commit method + mock_connection = Mock() + mock_connection.commit = AsyncMock() + setattr(mock_request.ctx, session_middleware._connection_key, mock_connection) + + await session_middleware.after_response(mock_request, mock_response) + + # Verify commit was called + mock_connection.commit.assert_called_once() + + async def test_after_response_transaction_rollback_on_error(self, session_middleware, mock_request, mock_response): + """Test that after_response rolls back transaction on error response.""" + # Set up error response + mock_response.status = 500 + session_middleware.database_config.commit_mode = "autocommit" + + # Set up connection with rollback method + mock_connection = Mock() + mock_connection.rollback = AsyncMock() + setattr(mock_request.ctx, session_middleware._connection_key, mock_connection) + + await session_middleware.after_response(mock_request, mock_response) + + # Verify rollback was called + mock_connection.rollback.assert_called_once() + + async def test_after_response_manual_commit_mode_skips_transaction(self, session_middleware, mock_request, mock_response): + """Test that after_response skips transaction handling in manual mode.""" + # Set up manual commit mode + session_middleware.database_config.commit_mode = "manual" + + # Set up connection + mock_connection = Mock() + mock_connection.commit = AsyncMock() + mock_connection.rollback = AsyncMock() + setattr(mock_request.ctx, session_middleware._connection_key, mock_connection) + + await session_middleware.after_response(mock_request, mock_response) + + # Verify neither commit nor rollback was called + mock_connection.commit.assert_not_called() + mock_connection.rollback.assert_not_called() + + async def test_after_response_connection_cleanup(self, session_middleware, mock_request, mock_response): + """Test that after_response cleans up connection resources.""" + # Set up connection with close method + mock_connection = Mock() + mock_connection.close = AsyncMock() + setattr(mock_request.ctx, session_middleware._connection_key, mock_connection) + + await session_middleware.after_response(mock_request, mock_response) + + # Verify connection was closed + mock_connection.close.assert_called_once() + + async def test_after_response_handles_transaction_exceptions(self, session_middleware, mock_request, mock_response, caplog): + """Test that after_response handles transaction exceptions gracefully.""" + session_middleware.database_config.commit_mode = "autocommit" + mock_response.status = 200 + + # Set up connection that raises exception on commit + mock_connection = Mock() + mock_connection.commit = AsyncMock(side_effect=Exception("Commit failed")) + mock_connection.rollback = AsyncMock() + setattr(mock_request.ctx, session_middleware._connection_key, mock_connection) + + # Should not raise exception + await session_middleware.after_response(mock_request, mock_response) + + # Verify rollback was attempted after commit failure + mock_connection.rollback.assert_called_once() + + +class TestShouldCommitTransaction: + """Test _should_commit_transaction logic.""" + + def test_manual_mode_never_commits(self, session_middleware): + """Test that manual mode never commits automatically.""" + session_middleware.database_config.commit_mode = "manual" + + assert not session_middleware._should_commit_transaction(200) + assert not session_middleware._should_commit_transaction(500) + + def test_autocommit_mode_commits_on_success(self, session_middleware): + """Test that autocommit mode commits on successful status codes.""" + session_middleware.database_config.commit_mode = "autocommit" + + # Success codes should commit + assert session_middleware._should_commit_transaction(200) + assert session_middleware._should_commit_transaction(201) + assert session_middleware._should_commit_transaction(299) + + # Redirect codes should not commit + assert not session_middleware._should_commit_transaction(300) + assert not session_middleware._should_commit_transaction(302) + + # Error codes should not commit + assert not session_middleware._should_commit_transaction(400) + assert not session_middleware._should_commit_transaction(500) + + def test_autocommit_include_redirect_mode(self, session_middleware): + """Test that autocommit_include_redirect mode commits on success and redirect.""" + session_middleware.database_config.commit_mode = "autocommit_include_redirect" + + # Success and redirect codes should commit + assert session_middleware._should_commit_transaction(200) + assert session_middleware._should_commit_transaction(302) + assert session_middleware._should_commit_transaction(399) + + # Error codes should not commit + assert not session_middleware._should_commit_transaction(400) + assert not session_middleware._should_commit_transaction(500) + + def test_extra_commit_statuses_override(self, session_middleware): + """Test that extra_commit_statuses override default behavior.""" + session_middleware.database_config.commit_mode = "autocommit" + session_middleware.database_config.extra_commit_statuses = {500, 503} + + # Error codes in extra_commit_statuses should commit + assert session_middleware._should_commit_transaction(500) + assert session_middleware._should_commit_transaction(503) + + # Other error codes should not commit + assert not session_middleware._should_commit_transaction(404) + + def test_extra_rollback_statuses_override(self, session_middleware): + """Test that extra_rollback_statuses override default behavior.""" + session_middleware.database_config.commit_mode = "autocommit" + session_middleware.database_config.extra_rollback_statuses = {200, 201} + + # Success codes in extra_rollback_statuses should not commit + assert not session_middleware._should_commit_transaction(200) + assert not session_middleware._should_commit_transaction(201) + + # Other success codes should commit + assert session_middleware._should_commit_transaction(202) diff --git a/tests/unit/test_extensions/test_sanic/test_providers.py b/tests/unit/test_extensions/test_sanic/test_providers.py new file mode 100644 index 000000000..d00426b95 --- /dev/null +++ b/tests/unit/test_extensions/test_sanic/test_providers.py @@ -0,0 +1,298 @@ +"""Unit tests for Sanic providers.""" + +from unittest.mock import Mock + +import pytest + +from sqlspec.adapters.sqlite import SqliteConfig, SqliteConnectionParams +from sqlspec.extensions.sanic.config import DatabaseConfig +from sqlspec.extensions.sanic.providers import ( + create_filter_provider, + create_service_provider, + provide_connection, + provide_filters, + provide_pool, + provide_service, + provide_session, +) + + +@pytest.fixture +def mock_request(): + """Create a mock Sanic request object.""" + request = Mock() + request.args = {"limit": "10", "offset": "0", "search": "test"} + request.ctx = Mock() + request.app = Mock() + request.app.ctx = Mock() + return request + + +@pytest.fixture +def mock_sqlspec(): + """Create a mock SQLSpec instance.""" + sqlspec = Mock() + sqlspec._configs = [] + return sqlspec + + +@pytest.fixture +def sqlite_config(): + """Create a SQLite configuration for testing.""" + return SqliteConfig(pool_config=SqliteConnectionParams(database=":memory:")) + + +@pytest.fixture +def database_config(sqlite_config): + """Create a database configuration for testing.""" + return DatabaseConfig(config=sqlite_config) + + +class TestProvideService: + """Test the provide_service function.""" + + def test_provide_service_creates_provider(self): + """Test that provide_service creates a provider function.""" + provider = provide_service(str, None) + assert callable(provider) + + def test_provider_requires_sqlspec_in_app_context(self, mock_request): + """Test that provider requires SQLSpec in app context.""" + mock_request.app.ctx.sqlspec = None + + provider = provide_service(str, None) + + with pytest.raises(RuntimeError, match="SQLSpec not initialized"): + provider(mock_request) + + def test_provider_uses_first_config_when_none_provided(self, mock_request, mock_sqlspec, database_config): + """Test that provider uses first config when none provided.""" + mock_sqlspec._configs = [database_config] + mock_sqlspec.get_session.return_value = Mock() + mock_request.app.ctx.sqlspec = mock_sqlspec + + provider = provide_service(str, None) + + # Mock the service class to avoid actual instantiation + with pytest.raises(TypeError): # str() doesn't accept session parameter + provider(mock_request) + + def test_provider_with_cache_key_uses_singleton(self, mock_request, mock_sqlspec, database_config): + """Test that provider with cache key uses singleton pattern.""" + mock_sqlspec._configs = [database_config] + mock_sqlspec.get_session.return_value = Mock() + mock_request.app.ctx.sqlspec = mock_sqlspec + + provider = provide_service(str, None, cache_key="test_service") + + # This would use get_cached_instance, behavior depends on implementation + with pytest.raises(TypeError): # str() doesn't accept session parameter + provider(mock_request) + + +class TestProvideFilters: + """Test the provide_filters function.""" + + def test_provide_filters_creates_provider(self): + """Test that provide_filters creates a provider function.""" + filter_types = [dict] + provider = provide_filters(filter_types) + assert callable(provider) + + def test_provider_processes_single_filter_type(self, mock_request): + """Test provider processes single filter type.""" + provider = provide_filters(dict) + result = provider(mock_request) + + assert isinstance(result, dict) + assert "dict" in result + assert isinstance(result["dict"], dict) + + def test_provider_processes_multiple_filter_types(self, mock_request): + """Test provider processes multiple filter types.""" + filter_types = [dict, list] + provider = provide_filters(filter_types) + result = provider(mock_request) + + assert isinstance(result, dict) + assert "dict" in result + assert "list" in result + + def test_provider_applies_field_transformations(self, mock_request): + """Test that provider applies field transformations.""" + mock_request.args = {"field_slug": "test-value", "other": "normal"} + + class TestFilter: + def __init__(self): + self.field_slug = None + self.other = None + + provider = provide_filters([TestFilter]) + result = provider(mock_request) + + filter_instance = result["testfilter"] + assert hasattr(filter_instance, "field_slug") + assert hasattr(filter_instance, "other") + + +class TestProvideConnection: + """Test the provide_connection function.""" + + def test_provide_connection_creates_provider(self): + """Test that provide_connection creates a provider function.""" + provider = provide_connection() + assert callable(provider) + + def test_provider_requires_sqlspec_in_app_context(self, mock_request): + """Test that provider requires SQLSpec in app context.""" + mock_request.app.ctx.sqlspec = None + + provider = provide_connection() + + with pytest.raises(RuntimeError, match="SQLSpec not initialized"): + provider(mock_request) + + def test_provider_requires_connection_in_request_context(self, mock_request, mock_sqlspec, database_config): + """Test that provider requires connection in request context.""" + mock_sqlspec._configs = [database_config] + mock_request.app.ctx.sqlspec = mock_sqlspec + mock_request.ctx.db_connection = None + + provider = provide_connection() + + with pytest.raises(RuntimeError, match="No connection available"): + provider(mock_request) + + def test_provider_returns_connection_from_context(self, mock_request, mock_sqlspec, database_config): + """Test that provider returns connection from request context.""" + mock_connection = Mock() + mock_sqlspec._configs = [database_config] + mock_request.app.ctx.sqlspec = mock_sqlspec + mock_request.ctx.db_connection = mock_connection + + provider = provide_connection() + result = provider(mock_request) + + assert result is mock_connection + + +class TestProvidePool: + """Test the provide_pool function.""" + + def test_provide_pool_creates_provider(self): + """Test that provide_pool creates a provider function.""" + provider = provide_pool() + assert callable(provider) + + def test_provider_requires_sqlspec_in_app_context(self, mock_request): + """Test that provider requires SQLSpec in app context.""" + mock_request.app.ctx.sqlspec = None + + provider = provide_pool() + + with pytest.raises(RuntimeError, match="SQLSpec not initialized"): + provider(mock_request) + + def test_provider_gets_pool_from_app_context(self, mock_request, mock_sqlspec, database_config): + """Test that provider gets pool from app context.""" + mock_pool = Mock() + mock_sqlspec._configs = [database_config] + mock_sqlspec.get_engine.return_value = mock_pool + mock_request.app.ctx.sqlspec = mock_sqlspec + + provider = provide_pool() + result = provider(mock_request) + + assert result is mock_pool + + +class TestProvideSession: + """Test the provide_session function.""" + + def test_provide_session_creates_provider(self): + """Test that provide_session creates a provider function.""" + provider = provide_session() + assert callable(provider) + + def test_provider_requires_sqlspec_in_app_context(self, mock_request): + """Test that provider requires SQLSpec in app context.""" + mock_request.app.ctx.sqlspec = None + + provider = provide_session() + + with pytest.raises(RuntimeError, match="SQLSpec not initialized"): + provider(mock_request) + + def test_provider_gets_session_from_sqlspec(self, mock_request, mock_sqlspec, database_config): + """Test that provider gets session from SQLSpec.""" + mock_session = Mock() + mock_sqlspec._configs = [database_config] + mock_sqlspec.get_session.return_value = mock_session + mock_request.app.ctx.sqlspec = mock_sqlspec + + provider = provide_session(database_config) + result = provider(mock_request) + + assert result is mock_session + + +class TestCreateServiceProvider: + """Test the create_service_provider function.""" + + def test_create_service_provider_creates_provider(self): + """Test that create_service_provider creates a provider function.""" + provider = create_service_provider(str) + assert callable(provider) + + def test_provider_with_config_key_lookup(self, mock_request, mock_sqlspec, database_config): + """Test provider with configuration key lookup.""" + mock_sqlspec._configs = [database_config] + mock_sqlspec.get_config.return_value = database_config + mock_sqlspec.get_session.return_value = Mock() + mock_request.app.ctx.sqlspec = mock_sqlspec + + provider = create_service_provider(str, config_key="test_config") + + with pytest.raises(TypeError): # str() constructor issue + provider(mock_request) + + # Verify get_config was called + mock_sqlspec.get_config.assert_called_once_with("test_config") + + +class TestCreateFilterProvider: + """Test the create_filter_provider function.""" + + def test_create_filter_provider_creates_provider(self): + """Test that create_filter_provider creates a provider function.""" + provider = create_filter_provider(dict) + assert callable(provider) + + def test_provider_applies_field_mapping(self, mock_request): + """Test that provider applies field mapping.""" + mock_request.args = {"query_param": "test_value"} + + class TestFilter: + def __init__(self): + self.filter_field = None + + field_mapping = {"query_param": "filter_field"} + provider = create_filter_provider(TestFilter, field_mapping=field_mapping) + result = provider(mock_request) + + assert hasattr(result, "filter_field") + assert result.filter_field == "test_value" + + def test_provider_handles_missing_attributes(self, mock_request): + """Test that provider handles missing attributes gracefully.""" + mock_request.args = {"nonexistent_param": "value"} + + class TestFilter: + def __init__(self): + pass + + provider = create_filter_provider(TestFilter) + result = provider(mock_request) + + # Should create filter instance even if parameters don't match + assert isinstance(result, TestFilter) diff --git a/tests/unit/test_utils/test_portal.py b/tests/unit/test_utils/test_portal.py new file mode 100644 index 000000000..3aa465014 --- /dev/null +++ b/tests/unit/test_utils/test_portal.py @@ -0,0 +1,461 @@ +"""Tests for portal provider functionality.""" + +import asyncio +import threading + +import pytest + +from sqlspec.exceptions import ImproperConfigurationError +from sqlspec.utils.portal import Portal, PortalProvider, PortalProviderSingleton + +# Portal Provider Singleton Tests + + +def test_singleton_same_loop() -> None: + """Test that singleton returns same instance for same loop.""" + provider1 = PortalProvider() + provider2 = PortalProvider() + + assert provider1 is provider2 + + +def test_singleton_different_loops() -> None: + """Test that singleton returns different instances for different loops.""" + loop1 = asyncio.new_event_loop() + loop2 = asyncio.new_event_loop() + + try: + provider1 = PortalProvider(loop=loop1) + provider2 = PortalProvider(loop=loop2) + + assert provider1 is not provider2 + finally: + loop1.close() + loop2.close() + + +def test_singleton_clear_instances() -> None: + """Test clearing singleton instances.""" + # Clear instances for testing + PortalProviderSingleton._instances.clear() + + provider1 = PortalProvider() + provider2 = PortalProvider() + + assert provider1 is provider2 + assert len(PortalProviderSingleton._instances) == 1 + + +# Portal Provider Tests + + +def test_portal_provider_initialization() -> None: + """Test portal provider initializes correctly.""" + provider = PortalProvider() + + assert provider._loop is None + assert provider._thread is None + assert not provider.is_running + assert not provider.is_ready + + +def test_portal_provider_with_loop() -> None: + """Test portal provider initialization with provided loop.""" + loop = asyncio.new_event_loop() + try: + provider = PortalProvider(loop=loop) + assert provider._loop is loop + finally: + loop.close() + + +def test_portal_property() -> None: + """Test portal property returns Portal instance.""" + provider = PortalProvider() + portal = provider.portal + + assert isinstance(portal, Portal) + assert portal._provider is provider + + +def test_loop_property_not_started() -> None: + """Test loop property raises error when not started.""" + provider = PortalProvider() + + with pytest.raises(ImproperConfigurationError, match="The PortalProvider is not started"): + _ = provider.loop + + +def test_start_portal_success() -> None: + """Test successful portal start.""" + provider = PortalProvider() + provider.start() + + try: + assert provider.is_running + assert provider.is_ready + assert provider._thread is not None + assert provider._loop is not None + assert provider._thread.is_alive() + finally: + provider.stop() + + +def test_start_portal_already_started() -> None: + """Test starting already started portal shows warning.""" + provider = PortalProvider() + provider.start() + + try: + with pytest.warns(UserWarning, match="PortalProvider already started"): + provider.start() + finally: + provider.stop() + + +def test_stop_portal_success() -> None: + """Test successful portal stop.""" + provider = PortalProvider() + provider.start() + + assert provider.is_running + + provider.stop() + + assert not provider.is_running + assert not provider.is_ready + assert provider._loop is None + assert provider._thread is None + + +def test_stop_portal_not_started() -> None: + """Test stopping a portal that's not started.""" + provider = PortalProvider() + + # Should not raise exception + provider.stop() + + assert not provider.is_running + assert not provider.is_ready + + +def test_call_success() -> None: + """Test successful async function call through portal.""" + provider = PortalProvider() + provider.start() + + try: + + async def test_func(arg1: str, arg2: int = 42) -> str: + await asyncio.sleep(0.01) # Simulate async work + return f"{arg1}_{arg2}" + + result = provider.call(test_func, "hello", arg2=100) + + assert result == "hello_100" + finally: + provider.stop() + + +def test_call_portal_not_started() -> None: + """Test calling function when portal is not started.""" + provider = PortalProvider() + + async def test_func() -> str: + return "test" + + with pytest.raises(ImproperConfigurationError, match="The PortalProvider is not started"): + provider.call(test_func) + + +def test_call_with_exception() -> None: + """Test calling function that raises exception.""" + provider = PortalProvider() + provider.start() + + try: + + async def test_func() -> None: + await asyncio.sleep(0.01) + raise ValueError("Test exception") + + with pytest.raises(ValueError, match="Test exception"): + provider.call(test_func) + finally: + provider.stop() + + +def test_concurrent_calls() -> None: + """Test multiple concurrent calls through portal.""" + provider = PortalProvider() + provider.start() + + try: + + async def test_func(value: int) -> int: + await asyncio.sleep(0.01) + return value * 2 + + # Test concurrent calls from multiple threads + results = [] + threads = [] + + def call_func(val: int) -> None: + result = provider.call(test_func, val) + results.append(result) + + for i in range(5): + thread = threading.Thread(target=call_func, args=(i,)) + threads.append(thread) + thread.start() + + for thread in threads: + thread.join() + + assert sorted(results) == [0, 2, 4, 6, 8] + finally: + provider.stop() + + +# Portal Tests + + +def test_portal_initialization() -> None: + """Test portal initializes correctly.""" + provider = PortalProvider() + portal = Portal(provider) + + assert portal._provider is provider + + +def test_portal_call_delegates_to_provider() -> None: + """Test that portal.call delegates to provider.call.""" + provider = PortalProvider() + provider.start() + + try: + portal = Portal(provider) + + async def test_func(value: str) -> str: + await asyncio.sleep(0.01) + return f"result_{value}" + + result = portal.call(test_func, "test") + + assert result == "result_test" + finally: + provider.stop() + + +def test_portal_call_not_started() -> None: + """Test portal call when provider not started.""" + provider = PortalProvider() + portal = Portal(provider) + + async def test_func() -> str: + return "test" + + with pytest.raises(ImproperConfigurationError): + portal.call(test_func) + + +# Integration Tests + + +def test_complex_async_function() -> None: + """Test calling complex async function with multiple operations.""" + provider = PortalProvider() + provider.start() + + try: + + async def complex_func(items: list[int]) -> dict[str, int]: + result = {} + for item in items: + await asyncio.sleep(0.001) # Simulate async work + result[f"item_{item}"] = item * item + return result + + result = provider.call(complex_func, [1, 2, 3, 4]) + expected = {"item_1": 1, "item_2": 4, "item_3": 9, "item_4": 16} + + assert result == expected + finally: + provider.stop() + + +def test_async_context_manager_simulation() -> None: + """Test async function that simulates context manager behavior.""" + provider = PortalProvider() + provider.start() + + try: + + async def simulate_context_manager() -> str: + # Simulate acquiring resource + await asyncio.sleep(0.01) + try: + # Simulate work with resource + await asyncio.sleep(0.01) + return "resource_result" + finally: + # Simulate cleanup + await asyncio.sleep(0.01) + + result = provider.call(simulate_context_manager) + assert result == "resource_result" + finally: + provider.stop() + + +def test_portal_lifecycle_management() -> None: + """Test proper lifecycle management of portal.""" + provider = PortalProvider() + + # Initially not running + assert not provider.is_running + assert not provider.is_ready + + # Start portal + provider.start() + assert provider.is_running + assert provider.is_ready + + # Use portal + async def test_func() -> str: + return "success" + + result = provider.call(test_func) + assert result == "success" + + # Stop portal + provider.stop() + assert not provider.is_running + assert not provider.is_ready + + # Cannot use after stop + with pytest.raises(ImproperConfigurationError): + provider.call(test_func) + + +def test_error_propagation() -> None: + """Test that errors are properly propagated from async functions.""" + provider = PortalProvider() + provider.start() + + try: + + async def error_func(error_type: type[Exception], message: str) -> None: + await asyncio.sleep(0.01) + raise error_type(message) + + # Test different exception types + with pytest.raises(ValueError, match="Value error"): + provider.call(error_func, ValueError, "Value error") + + with pytest.raises(RuntimeError, match="Runtime error"): + provider.call(error_func, RuntimeError, "Runtime error") + + with pytest.raises(KeyError, match="Key error"): + provider.call(error_func, KeyError, "Key error") + finally: + provider.stop() + + +# Edge Cases + + +def test_stop_before_start() -> None: + """Test stopping portal before starting.""" + provider = PortalProvider() + provider.stop() # Should not raise exception + + assert not provider.is_running + assert not provider.is_ready + + +def test_multiple_stops() -> None: + """Test stopping portal multiple times.""" + provider = PortalProvider() + provider.start() + + provider.stop() + provider.stop() # Should not raise exception + + assert not provider.is_running + + +def test_rapid_start_stop_cycles() -> None: + """Test rapid start/stop cycles.""" + provider = PortalProvider() + + for _ in range(3): + provider.start() + assert provider.is_running + + async def test_func() -> str: + return "test" + + result = provider.call(test_func) + assert result == "test" + + provider.stop() + assert not provider.is_running + + +def test_call_with_keyword_only_args() -> None: + """Test calling function with keyword-only arguments.""" + provider = PortalProvider() + provider.start() + + try: + + async def test_func(*, name: str, value: int) -> str: + await asyncio.sleep(0.01) + return f"{name}={value}" + + result = provider.call(test_func, name="test", value=42) + assert result == "test=42" + finally: + provider.stop() + + +def test_call_returns_none() -> None: + """Test calling function that returns None.""" + provider = PortalProvider() + provider.start() + + try: + + async def test_func() -> None: + await asyncio.sleep(0.01) + return + + result = provider.call(test_func) + assert result is None + finally: + provider.stop() + + +def test_global_portal_provider_reset() -> None: + """Test resetting the global portal provider for testing.""" + import sqlspec.utils.portal as portal_module + + # Store original + original = portal_module.PortalProviderSingleton._instances.copy() + + try: + # Reset instances + portal_module.PortalProviderSingleton._instances.clear() + + # Get new instance + provider1 = PortalProvider() + provider2 = PortalProvider() + + assert provider1 is provider2 + assert len(portal_module.PortalProviderSingleton._instances) == 1 + + finally: + # Restore original + portal_module.PortalProviderSingleton._instances.clear() + portal_module.PortalProviderSingleton._instances.update(original) diff --git a/uv.lock b/uv.lock index b041ec104..562ace916 100644 --- a/uv.lock +++ b/uv.lock @@ -118,6 +118,33 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b4/18/c857aecc1b80c02bb0b9af8464ef7c250caab2a0120a68f56b4501db32f6/adbc_driver_sqlite-1.7.0-py3-none-win_amd64.whl", hash = "sha256:d70f05a1d737ac477564e8810985101d6e8c6e632f790e396531ece8d3a93248", size = 867977, upload-time = "2025-07-07T06:23:06.155Z" }, ] +[[package]] +name = "aiobotocore" +version = "2.24.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "aioitertools" }, + { name = "botocore" }, + { name = "jmespath" }, + { name = "multidict" }, + { name = "python-dateutil" }, + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1b/02/b4ed1af4b3437c2fc6e6111e7fdee011b34cf1c0cc8f314474f843e10019/aiobotocore-2.24.1.tar.gz", hash = "sha256:59237f1b2d4ff619f9a9e78360b691d59b92fdd4d03d054dbd2eeff8ada5667e", size = 119754, upload-time = "2025-08-15T15:49:53.209Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/26/c3c93209084e24990ad1b4214f67dce1c0183454cec9cd2cad9433f493bb/aiobotocore-2.24.1-py3-none-any.whl", hash = "sha256:557922823455ca65bbd065b363b54846f16b9c4b6bd0b61ecdfa01ca13a04531", size = 85216, upload-time = "2025-08-15T15:49:51.442Z" }, +] + +[[package]] +name = "aiofiles" +version = "24.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/03/a88171e277e8caa88a4c77808c20ebb04ba74cc4681bf1e9416c862de237/aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c", size = 30247, upload-time = "2024-06-24T11:02:03.584Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/45/30bb92d442636f570cb5651bc661f52b610e2eec3f891a5dc3a4c3667db0/aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5", size = 15896, upload-time = "2024-06-24T11:02:01.529Z" }, +] + [[package]] name = "aiohappyeyeballs" version = "2.6.1" @@ -230,6 +257,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/14/25/e0cf8793aedc41c6d7f2aad646a27e27bdacafe3b402bb373d7651c94d73/aiohttp-3.12.15-cp39-cp39-win_amd64.whl", hash = "sha256:86ceded4e78a992f835209e236617bffae649371c4a50d5e5a3987f237db84b8", size = 453370, upload-time = "2025-07-29T05:52:29.936Z" }, ] +[[package]] +name = "aioitertools" +version = "0.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/de/38491a84ab323b47c7f86e94d2830e748780525f7a10c8600b67ead7e9ea/aioitertools-0.12.0.tar.gz", hash = "sha256:c2a9055b4fbb7705f561b9d86053e8af5d10cc845d22c32008c43490b2d8dd6b", size = 19369, upload-time = "2024-09-02T03:33:40.349Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/13/58b70a580de00893223d61de8fea167877a3aed97d4a5e1405c9159ef925/aioitertools-0.12.0-py3-none-any.whl", hash = "sha256:fc1f5fac3d737354de8831cbba3eb04f79dd649d8f3afb4c5b114925e662a796", size = 24345, upload-time = "2024-09-02T03:34:59.454Z" }, +] + [[package]] name = "aioodbc" version = "0.5.0" @@ -641,6 +680,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458, upload-time = "2024-11-08T17:25:46.184Z" }, ] +[[package]] +name = "botocore" +version = "1.39.11" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jmespath" }, + { name = "python-dateutil" }, + { name = "urllib3", version = "1.26.20", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "urllib3", version = "2.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6d/d0/9d64261186cff650fe63168441edb4f4cd33f085a74c0c54455630a71f91/botocore-1.39.11.tar.gz", hash = "sha256:953b12909d6799350e346ab038e55b6efe622c616f80aef74d7a6683ffdd972c", size = 14217749, upload-time = "2025-07-22T19:26:40.723Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1c/2c/8a0b02d60a1dbbae7faa5af30484b016aa3023f9833dfc0d19b0b770dd6a/botocore-1.39.11-py3-none-any.whl", hash = "sha256:1545352931a8a186f3e977b1e1a4542d7d434796e274c3c62efd0210b5ea76dc", size = 13876276, upload-time = "2025-07-22T19:26:35.164Z" }, +] + [[package]] name = "bracex" version = "2.6" @@ -1089,6 +1143,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fe/47/290daabcf91628f4fc0e17c75a1690b354ba067066cd14407712600e609f/dict2css-0.3.0.post1-py3-none-any.whl", hash = "sha256:f006a6b774c3e31869015122ae82c491fd25e7de4a75607a62aa3e798f837e0d", size = 25647, upload-time = "2023-11-22T11:09:19.221Z" }, ] +[[package]] +name = "dishka" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version == '3.10.*'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/11/04/f3add05678a3ac1ab7736faae45b18b5365d84b1cd3cf3af64b09a1d6a5f/dishka-1.6.0.tar.gz", hash = "sha256:f1fa5ec7e980d4f618d0c425d1bb81d8e9414894d8ec6553b197d2298774e12f", size = 65971, upload-time = "2025-05-18T21:40:53.259Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/6b/f9cd08543c4f55bf129a0ebce5c09e43528235dd6e7cb906761ca094979a/dishka-1.6.0-py3-none-any.whl", hash = "sha256:ab1aedee152ce7bb11cfd2673d7ce4001fe2b330d14e84535d7525a68430b2c2", size = 90789, upload-time = "2025-05-18T21:40:51.352Z" }, +] + [[package]] name = "distlib" version = "0.4.0" @@ -1105,7 +1171,8 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pywin32", marker = "sys_platform == 'win32'" }, { name = "requests" }, - { name = "urllib3" }, + { name = "urllib3", version = "1.26.20", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "urllib3", version = "2.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834, upload-time = "2024-05-23T11:13:57.216Z" } wheels = [ @@ -1493,6 +1560,11 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2f/e0/014d5d9d7a4564cf1c40b5039bc882db69fd881111e03ab3657ac0b218e2/fsspec-2025.7.0-py3-none-any.whl", hash = "sha256:8b012e39f63c7d5f10474de957f3ab793b47b45ae7d39f2fb735f8bbe25c0e21", size = 199597, upload-time = "2025-07-15T16:05:19.529Z" }, ] +[package.optional-dependencies] +s3 = [ + { name = "s3fs" }, +] + [[package]] name = "google-api-core" version = "2.25.1" @@ -1860,6 +1932,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6c/dd/a834df6482147d48e225a49515aabc28974ad5a4ca3215c18a882565b028/html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d", size = 112173, upload-time = "2020-06-22T23:32:36.781Z" }, ] +[[package]] +name = "html5tagger" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/02/2ae5f46d517a2c1d4a17f2b1e4834c2c7cc0fb3a69c92389172fa16ab389/html5tagger-1.3.0.tar.gz", hash = "sha256:84fa3dfb49e5c83b79bbd856ab7b1de8e2311c3bb46a8be925f119e3880a8da9", size = 14196, upload-time = "2023-03-28T05:59:34.642Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/12/2f5d43ee912ea14a6baba4b3db6d309b02d932e3b7074c3339b4aded98ff/html5tagger-1.3.0-py3-none-any.whl", hash = "sha256:ce14313515edffec8ed8a36c5890d023922641171b4e6e5774ad1a74998f5351", size = 10956, upload-time = "2023-03-28T05:59:32.524Z" }, +] + [[package]] name = "httpcore" version = "1.0.9" @@ -1873,6 +1954,49 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, ] +[[package]] +name = "httptools" +version = "0.6.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c", size = 240639, upload-time = "2024-10-16T19:45:08.902Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/6f/972f8eb0ea7d98a1c6be436e2142d51ad2a64ee18e02b0e7ff1f62171ab1/httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0", size = 198780, upload-time = "2024-10-16T19:44:06.882Z" }, + { url = "https://files.pythonhosted.org/packages/6a/b0/17c672b4bc5c7ba7f201eada4e96c71d0a59fbc185e60e42580093a86f21/httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da", size = 103297, upload-time = "2024-10-16T19:44:08.129Z" }, + { url = "https://files.pythonhosted.org/packages/92/5e/b4a826fe91971a0b68e8c2bd4e7db3e7519882f5a8ccdb1194be2b3ab98f/httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deee0e3343f98ee8047e9f4c5bc7cedbf69f5734454a94c38ee829fb2d5fa3c1", size = 443130, upload-time = "2024-10-16T19:44:09.45Z" }, + { url = "https://files.pythonhosted.org/packages/b0/51/ce61e531e40289a681a463e1258fa1e05e0be54540e40d91d065a264cd8f/httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca80b7485c76f768a3bc83ea58373f8db7b015551117375e4918e2aa77ea9b50", size = 442148, upload-time = "2024-10-16T19:44:11.539Z" }, + { url = "https://files.pythonhosted.org/packages/ea/9e/270b7d767849b0c96f275c695d27ca76c30671f8eb8cc1bab6ced5c5e1d0/httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:90d96a385fa941283ebd231464045187a31ad932ebfa541be8edf5b3c2328959", size = 415949, upload-time = "2024-10-16T19:44:13.388Z" }, + { url = "https://files.pythonhosted.org/packages/81/86/ced96e3179c48c6f656354e106934e65c8963d48b69be78f355797f0e1b3/httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:59e724f8b332319e2875efd360e61ac07f33b492889284a3e05e6d13746876f4", size = 417591, upload-time = "2024-10-16T19:44:15.258Z" }, + { url = "https://files.pythonhosted.org/packages/75/73/187a3f620ed3175364ddb56847d7a608a6fc42d551e133197098c0143eca/httptools-0.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:c26f313951f6e26147833fc923f78f95604bbec812a43e5ee37f26dc9e5a686c", size = 88344, upload-time = "2024-10-16T19:44:16.54Z" }, + { url = "https://files.pythonhosted.org/packages/7b/26/bb526d4d14c2774fe07113ca1db7255737ffbb119315839af2065abfdac3/httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069", size = 199029, upload-time = "2024-10-16T19:44:18.427Z" }, + { url = "https://files.pythonhosted.org/packages/a6/17/3e0d3e9b901c732987a45f4f94d4e2c62b89a041d93db89eafb262afd8d5/httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a", size = 103492, upload-time = "2024-10-16T19:44:19.515Z" }, + { url = "https://files.pythonhosted.org/packages/b7/24/0fe235d7b69c42423c7698d086d4db96475f9b50b6ad26a718ef27a0bce6/httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975", size = 462891, upload-time = "2024-10-16T19:44:21.067Z" }, + { url = "https://files.pythonhosted.org/packages/b1/2f/205d1f2a190b72da6ffb5f41a3736c26d6fa7871101212b15e9b5cd8f61d/httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636", size = 459788, upload-time = "2024-10-16T19:44:22.958Z" }, + { url = "https://files.pythonhosted.org/packages/6e/4c/d09ce0eff09057a206a74575ae8f1e1e2f0364d20e2442224f9e6612c8b9/httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721", size = 433214, upload-time = "2024-10-16T19:44:24.513Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d2/84c9e23edbccc4a4c6f96a1b8d99dfd2350289e94f00e9ccc7aadde26fb5/httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988", size = 434120, upload-time = "2024-10-16T19:44:26.295Z" }, + { url = "https://files.pythonhosted.org/packages/d0/46/4d8e7ba9581416de1c425b8264e2cadd201eb709ec1584c381f3e98f51c1/httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17", size = 88565, upload-time = "2024-10-16T19:44:29.188Z" }, + { url = "https://files.pythonhosted.org/packages/bb/0e/d0b71465c66b9185f90a091ab36389a7352985fe857e352801c39d6127c8/httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2", size = 200683, upload-time = "2024-10-16T19:44:30.175Z" }, + { url = "https://files.pythonhosted.org/packages/e2/b8/412a9bb28d0a8988de3296e01efa0bd62068b33856cdda47fe1b5e890954/httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44", size = 104337, upload-time = "2024-10-16T19:44:31.786Z" }, + { url = "https://files.pythonhosted.org/packages/9b/01/6fb20be3196ffdc8eeec4e653bc2a275eca7f36634c86302242c4fbb2760/httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1", size = 508796, upload-time = "2024-10-16T19:44:32.825Z" }, + { url = "https://files.pythonhosted.org/packages/f7/d8/b644c44acc1368938317d76ac991c9bba1166311880bcc0ac297cb9d6bd7/httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2", size = 510837, upload-time = "2024-10-16T19:44:33.974Z" }, + { url = "https://files.pythonhosted.org/packages/52/d8/254d16a31d543073a0e57f1c329ca7378d8924e7e292eda72d0064987486/httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81", size = 485289, upload-time = "2024-10-16T19:44:35.111Z" }, + { url = "https://files.pythonhosted.org/packages/5f/3c/4aee161b4b7a971660b8be71a92c24d6c64372c1ab3ae7f366b3680df20f/httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f", size = 489779, upload-time = "2024-10-16T19:44:36.253Z" }, + { url = "https://files.pythonhosted.org/packages/12/b7/5cae71a8868e555f3f67a50ee7f673ce36eac970f029c0c5e9d584352961/httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970", size = 88634, upload-time = "2024-10-16T19:44:37.357Z" }, + { url = "https://files.pythonhosted.org/packages/94/a3/9fe9ad23fd35f7de6b91eeb60848986058bd8b5a5c1e256f5860a160cc3e/httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660", size = 197214, upload-time = "2024-10-16T19:44:38.738Z" }, + { url = "https://files.pythonhosted.org/packages/ea/d9/82d5e68bab783b632023f2fa31db20bebb4e89dfc4d2293945fd68484ee4/httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083", size = 102431, upload-time = "2024-10-16T19:44:39.818Z" }, + { url = "https://files.pythonhosted.org/packages/96/c1/cb499655cbdbfb57b577734fde02f6fa0bbc3fe9fb4d87b742b512908dff/httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3", size = 473121, upload-time = "2024-10-16T19:44:41.189Z" }, + { url = "https://files.pythonhosted.org/packages/af/71/ee32fd358f8a3bb199b03261f10921716990808a675d8160b5383487a317/httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071", size = 473805, upload-time = "2024-10-16T19:44:42.384Z" }, + { url = "https://files.pythonhosted.org/packages/8a/0a/0d4df132bfca1507114198b766f1737d57580c9ad1cf93c1ff673e3387be/httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5", size = 448858, upload-time = "2024-10-16T19:44:43.959Z" }, + { url = "https://files.pythonhosted.org/packages/1e/6a/787004fdef2cabea27bad1073bf6a33f2437b4dbd3b6fb4a9d71172b1c7c/httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0", size = 452042, upload-time = "2024-10-16T19:44:45.071Z" }, + { url = "https://files.pythonhosted.org/packages/4d/dc/7decab5c404d1d2cdc1bb330b1bf70e83d6af0396fd4fc76fc60c0d522bf/httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8", size = 87682, upload-time = "2024-10-16T19:44:46.46Z" }, + { url = "https://files.pythonhosted.org/packages/51/b1/4fc6f52afdf93b7c4304e21f6add9e981e4f857c2fa622a55dfe21b6059e/httptools-0.6.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85797e37e8eeaa5439d33e556662cc370e474445d5fab24dcadc65a8ffb04003", size = 201123, upload-time = "2024-10-16T19:44:59.13Z" }, + { url = "https://files.pythonhosted.org/packages/c2/01/e6ecb40ac8fdfb76607c7d3b74a41b464458d5c8710534d8f163b0c15f29/httptools-0.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:db353d22843cf1028f43c3651581e4bb49374d85692a85f95f7b9a130e1b2cab", size = 104507, upload-time = "2024-10-16T19:45:00.254Z" }, + { url = "https://files.pythonhosted.org/packages/dc/24/c70c34119d209bf08199d938dc9c69164f585ed3029237b4bdb90f673cb9/httptools-0.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1ffd262a73d7c28424252381a5b854c19d9de5f56f075445d33919a637e3547", size = 449615, upload-time = "2024-10-16T19:45:01.351Z" }, + { url = "https://files.pythonhosted.org/packages/2b/62/e7f317fed3703bd81053840cacba4e40bcf424b870e4197f94bd1cf9fe7a/httptools-0.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c346571fa50d2e9856a37d7cd9435a25e7fd15e236c397bf224afaa355fe9", size = 448819, upload-time = "2024-10-16T19:45:02.652Z" }, + { url = "https://files.pythonhosted.org/packages/2a/13/68337d3be6b023260139434c49d7aa466aaa98f9aee7ed29270ac7dde6a2/httptools-0.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aafe0f1918ed07b67c1e838f950b1c1fabc683030477e60b335649b8020e1076", size = 422093, upload-time = "2024-10-16T19:45:03.765Z" }, + { url = "https://files.pythonhosted.org/packages/fc/b3/3a1bc45be03dda7a60c7858e55b6cd0489a81613c1908fb81cf21d34ae50/httptools-0.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0e563e54979e97b6d13f1bbc05a96109923e76b901f786a5eae36e99c01237bd", size = 423898, upload-time = "2024-10-16T19:45:05.683Z" }, + { url = "https://files.pythonhosted.org/packages/05/72/2ddc2ae5f7ace986f7e68a326215b2e7c32e32fd40e6428fa8f1d8065c7e/httptools-0.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:b799de31416ecc589ad79dd85a0b2657a8fe39327944998dea368c1d4c9e55e6", size = 89552, upload-time = "2024-10-16T19:45:07.566Z" }, +] + [[package]] name = "httpx" version = "0.28.1" @@ -1969,6 +2093,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, ] +[[package]] +name = "jmespath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843, upload-time = "2022-06-17T18:00:12.224Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = "2022-06-17T18:00:10.251Z" }, +] + [[package]] name = "litestar" version = "2.17.0" @@ -2136,7 +2269,8 @@ dependencies = [ { name = "certifi" }, { name = "pycryptodome" }, { name = "typing-extensions" }, - { name = "urllib3" }, + { name = "urllib3", version = "1.26.20", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "urllib3", version = "2.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/f4/a0/33ea2e18d5169817950edc13eba58cd781cedefe9f6696cae26aa2d75882/minio-7.2.16.tar.gz", hash = "sha256:81e365c8494d591d8204a63ee7596bfdf8a7d06ad1b1507d6b9c1664a95f299a", size = 139149, upload-time = "2025-07-21T20:11:15.911Z" } wheels = [ @@ -4163,7 +4297,8 @@ dependencies = [ { name = "certifi" }, { name = "charset-normalizer" }, { name = "idna" }, - { name = "urllib3" }, + { name = "urllib3", version = "1.26.20", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "urllib3", version = "2.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } wheels = [ @@ -4310,6 +4445,80 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/24/3c/21cf283d67af33a8e6ed242396863af195a8a6134ec581524fd22b9811b6/ruff-0.12.10-py3-none-win_arm64.whl", hash = "sha256:cc138cc06ed9d4bfa9d667a65af7172b47840e1a98b02ce7011c391e54635ffc", size = 12074225, upload-time = "2025-08-21T18:23:20.137Z" }, ] +[[package]] +name = "s3fs" +version = "2025.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiobotocore" }, + { name = "aiohttp" }, + { name = "fsspec" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bf/13/37438c4672ba1d23ec46df0e4b57e98469e5c5f4f98313cf6842b631652b/s3fs-2025.7.0.tar.gz", hash = "sha256:5e7f9ec0cad7745155e3eb86fae15b1481fa29946bf5b3a4ce3a60701ce6022d", size = 77795, upload-time = "2025-07-15T16:35:22.177Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/c7/30d13b7fd4f866ca3f30e9a6e7ae038f0c45226f6e26b3cc98d6d197f93b/s3fs-2025.7.0-py3-none-any.whl", hash = "sha256:b6b2d3f84b6aa1c2ba5e62e39dd9410cf54f10a2cce1ea6db1ba0d1a6bcce685", size = 30315, upload-time = "2025-07-15T16:35:20.734Z" }, +] + +[[package]] +name = "sanic" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiofiles" }, + { name = "html5tagger" }, + { name = "httptools" }, + { name = "multidict" }, + { name = "sanic-routing" }, + { name = "setuptools" }, + { name = "tracerite" }, + { name = "typing-extensions" }, + { name = "ujson", marker = "implementation_name == 'cpython' and sys_platform != 'win32'" }, + { name = "uvloop", marker = "implementation_name == 'cpython' and sys_platform != 'win32'" }, + { name = "websockets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/8b/08dc376390fe854ef32984973883b646ee68c6727da72ffcc65340d8f192/sanic-25.3.0.tar.gz", hash = "sha256:775d522001ec81f034ec8e4d7599e2175bfc097b8d57884f5e4c9322f5e369bb", size = 353027, upload-time = "2025-03-31T21:22:29.718Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a6/e1/b36ddc16862d63d22986ae21b04a79c8fb7ec48d5d664acdfd1c2acf78ac/sanic-25.3.0-py3-none-any.whl", hash = "sha256:fb519b38b4c220569b0e2e868583ffeaffaab96a78b2e42ae78bc56a644a4cd7", size = 246416, upload-time = "2025-03-31T21:22:27.946Z" }, +] + +[package.optional-dependencies] +ext = [ + { name = "sanic-ext" }, +] + +[[package]] +name = "sanic-ext" +version = "24.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyyaml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/43/c6/f5f87268e72825e3cd39c5b833996a2ac47f98b888f4253c5830afebd057/sanic_ext-24.12.0.tar.gz", hash = "sha256:8f912f4c29f242bc638346d09b79f0c8896ff64e79bd0e7fa09eac4b6c0e23c8", size = 66209, upload-time = "2025-03-05T07:24:39.795Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/3f/4c23be085bce45defd3863cbc707227fc82f49e7d9a5e1bb2656e2e1a2ed/sanic_ext-24.12.0-py3-none-any.whl", hash = "sha256:861f809f071770cf28acd5f13e97ed59985e07361b13b4b4540da1333730c83e", size = 96445, upload-time = "2025-03-05T07:24:38.059Z" }, +] + +[[package]] +name = "sanic-routing" +version = "23.12.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d1/5c/2a7edd14fbccca3719a8d680951d4b25f986752c781c61ccf156a6d1ebff/sanic-routing-23.12.0.tar.gz", hash = "sha256:1dcadc62c443e48c852392dba03603f9862b6197fc4cba5bbefeb1ace0848b04", size = 29473, upload-time = "2023-12-31T09:28:36.992Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cf/e3/3425c9a8773807ac2c01d6a56c8521733f09b627e5827e733c5cd36b9ac5/sanic_routing-23.12.0-py3-none-any.whl", hash = "sha256:1558a72afcb9046ed3134a5edae02fc1552cff08f0fff2e8d5de0877ea43ed73", size = 25522, upload-time = "2023-12-31T09:28:35.233Z" }, +] + +[[package]] +name = "sanic-testing" +version = "24.6.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b2/56/8d31d8a7e0b61633d6358694edfae976e69739b5bd640ceac7989b62e749/sanic_testing-24.6.0.tar.gz", hash = "sha256:7591ce537e2a651efb6dc01b458e7e4ea5347f6d91438676774c6f505a124731", size = 10871, upload-time = "2024-06-30T12:13:31.346Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/93/1d588f1cb9b710b9f22fa78b53d699a8062edc94204d50dd0d78c5f5b495/sanic_testing-24.6.0-py3-none-any.whl", hash = "sha256:b1027184735e88230891aa0461fff84093abfa3bff0f4d29c0f78f42e59efada", size = 10326, upload-time = "2024-06-30T12:13:30.014Z" }, +] + [[package]] name = "setuptools" version = "80.9.0" @@ -4679,7 +4888,7 @@ dependencies = [ { name = "pygments", marker = "python_full_version >= '3.10'" }, { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "urllib3", marker = "python_full_version >= '3.10'" }, + { name = "urllib3", version = "2.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/34/fe/ac4e24f35b5148b31ac717ae7dcc7a2f7ec56eb729e22c7252ed8ad2d9a5/sphinx_prompt-1.9.0.tar.gz", hash = "sha256:471b3c6d466dce780a9b167d9541865fd4e9a80ed46e31b06a52a0529ae995a1", size = 5340, upload-time = "2024-08-07T15:46:51.428Z" } wheels = [ @@ -5066,6 +5275,9 @@ pymssql = [ pymysql = [ { name = "pymysql" }, ] +sanic = [ + { name = "sanic", extra = ["ext"] }, +] spanner = [ { name = "google-cloud-spanner" }, ] @@ -5099,7 +5311,9 @@ dev = [ { name = "auto-pytabs", extra = ["sphinx"] }, { name = "bump-my-version" }, { name = "coverage" }, + { name = "dishka", marker = "python_full_version >= '3.10'" }, { name = "duckdb" }, + { name = "fsspec", extra = ["s3"] }, { name = "hatch-mypyc" }, { name = "mypy" }, { name = "myst-parser", version = "3.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, @@ -5110,6 +5324,7 @@ dev = [ { name = "psutil" }, { name = "pyarrow" }, { name = "pyarrow-stubs" }, + { name = "pydantic-extra-types" }, { name = "pydantic-settings" }, { name = "pyright" }, { name = "pytest" }, @@ -5123,6 +5338,7 @@ dev = [ { name = "rich" }, { name = "rich-click" }, { name = "ruff" }, + { name = "sanic-testing" }, { name = "shibuya" }, { name = "slotscheck" }, { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, @@ -5174,9 +5390,13 @@ extras = [ { name = "adbc-driver-manager" }, { name = "adbc-driver-postgresql" }, { name = "adbc-driver-sqlite" }, + { name = "dishka", marker = "python_full_version >= '3.10'" }, + { name = "fsspec", extra = ["s3"] }, { name = "pgvector" }, { name = "polars" }, { name = "pyarrow" }, + { name = "pydantic-extra-types" }, + { name = "sanic-testing" }, ] lint = [ { name = "asyncpg-stubs" }, @@ -5245,12 +5465,14 @@ requires-dist = [ { name = "pymysql", marker = "extra == 'pymysql'" }, { name = "rich-click" }, { name = "rich-click", marker = "extra == 'cli'" }, + { name = "sanic", marker = "extra == 'sanic'" }, + { name = "sanic", extras = ["ext"], marker = "extra == 'sanic'", specifier = ">=24.6.0" }, { name = "sqlglot", specifier = ">=19.9.0" }, { name = "sqlglot", extras = ["rs"], marker = "extra == 'performance'" }, { name = "typing-extensions" }, { name = "uuid-utils", marker = "extra == 'uuid'" }, ] -provides-extras = ["adbc", "aioodbc", "aiosql", "aiosqlite", "asyncmy", "asyncpg", "attrs", "bigquery", "cli", "duckdb", "fastapi", "flask", "fsspec", "litestar", "msgspec", "mypyc", "nanoid", "obstore", "opentelemetry", "oracledb", "orjson", "pandas", "performance", "polars", "prometheus", "psqlpy", "psycopg", "pydantic", "pymssql", "pymysql", "spanner", "uuid"] +provides-extras = ["adbc", "aioodbc", "aiosql", "aiosqlite", "asyncmy", "asyncpg", "attrs", "bigquery", "cli", "duckdb", "fastapi", "flask", "fsspec", "litestar", "msgspec", "mypyc", "nanoid", "obstore", "opentelemetry", "oracledb", "orjson", "pandas", "performance", "polars", "prometheus", "psqlpy", "psycopg", "pydantic", "pymssql", "pymysql", "sanic", "spanner", "uuid"] [package.metadata.requires-dev] benchmarks = [ @@ -5278,7 +5500,9 @@ dev = [ { name = "auto-pytabs", extras = ["sphinx"], specifier = ">=0.5.0" }, { name = "bump-my-version" }, { name = "coverage", specifier = ">=7.6.1" }, + { name = "dishka", marker = "python_full_version >= '3.10'" }, { name = "duckdb" }, + { name = "fsspec", extras = ["s3"] }, { name = "hatch-mypyc" }, { name = "mypy", specifier = ">=1.13.0" }, { name = "myst-parser" }, @@ -5288,6 +5512,7 @@ dev = [ { name = "psutil" }, { name = "pyarrow" }, { name = "pyarrow-stubs" }, + { name = "pydantic-extra-types" }, { name = "pydantic-settings" }, { name = "pyright", specifier = ">=1.1.386" }, { name = "pytest", specifier = ">=8.0.0" }, @@ -5301,6 +5526,7 @@ dev = [ { name = "rich" }, { name = "rich-click" }, { name = "ruff", specifier = ">=0.7.1" }, + { name = "sanic-testing" }, { name = "shibuya" }, { name = "slotscheck", specifier = ">=0.16.5" }, { name = "sphinx", marker = "python_full_version < '3.10'", specifier = ">=7.0.0" }, @@ -5343,9 +5569,13 @@ extras = [ { name = "adbc-driver-manager" }, { name = "adbc-driver-postgresql" }, { name = "adbc-driver-sqlite" }, + { name = "dishka", marker = "python_full_version >= '3.10'" }, + { name = "fsspec", extras = ["s3"] }, { name = "pgvector" }, { name = "polars" }, { name = "pyarrow" }, + { name = "pydantic-extra-types" }, + { name = "sanic-testing" }, ] lint = [ { name = "asyncpg-stubs" }, @@ -5462,6 +5692,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/bd/75/8539d011f6be8e29f339c42e633aae3cb73bffa95dd0f9adec09b9c58e85/tomlkit-0.13.3-py3-none-any.whl", hash = "sha256:c89c649d79ee40629a9fda55f8ace8c6a1b42deb912b2a8fd8d942ddadb606b0", size = 38901, upload-time = "2025-06-05T07:13:43.546Z" }, ] +[[package]] +name = "tracerite" +version = "1.1.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "html5tagger" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/27/b2/37b825b881f23bc56384c3142214ccbe5d9de7e7c5fe3d155fa032738b98/tracerite-1.1.3.tar.gz", hash = "sha256:119fc006f240aa03fffb41cf99cf82fda5c0449c7d4b6fe42c6340403578b31e", size = 269646, upload-time = "2025-06-19T17:47:42.289Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e6/bf/c65d36ec5a93048dd55b3247be26059970daad72263e35ecace2f3188b2c/tracerite-1.1.3-py3-none-any.whl", hash = "sha256:811d8e2e0fb563b77340eebe2e9f7b324acfe01e09ea58db8bcaecb24327c823", size = 12422, upload-time = "2025-06-19T17:47:40.173Z" }, +] + [[package]] name = "trove-classifiers" version = "2025.8.26.11" @@ -5570,10 +5812,98 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" }, ] +[[package]] +name = "ujson" +version = "5.11.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/d9/3f17e3c5773fb4941c68d9a37a47b1a79c9649d6c56aefbed87cc409d18a/ujson-5.11.0.tar.gz", hash = "sha256:e204ae6f909f099ba6b6b942131cee359ddda2b6e4ea39c12eb8b991fe2010e0", size = 7156583, upload-time = "2025-08-20T11:57:02.452Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/0c/8bf7a4fabfd01c7eed92d9b290930ce6d14910dec708e73538baa38885d1/ujson-5.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:446e8c11c06048611c9d29ef1237065de0af07cabdd97e6b5b527b957692ec25", size = 55248, upload-time = "2025-08-20T11:55:02.368Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2e/eeab0b8b641817031ede4f790db4c4942df44a12f44d72b3954f39c6a115/ujson-5.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:16ccb973b7ada0455201808ff11d48fe9c3f034a6ab5bd93b944443c88299f89", size = 53157, upload-time = "2025-08-20T11:55:04.012Z" }, + { url = "https://files.pythonhosted.org/packages/21/1b/a4e7a41870797633423ea79618526747353fd7be9191f3acfbdee0bf264b/ujson-5.11.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3134b783ab314d2298d58cda7e47e7a0f7f71fc6ade6ac86d5dbeaf4b9770fa6", size = 57657, upload-time = "2025-08-20T11:55:05.169Z" }, + { url = "https://files.pythonhosted.org/packages/94/ae/4e0d91b8f6db7c9b76423b3649612189506d5a06ddd3b6334b6d37f77a01/ujson-5.11.0-cp310-cp310-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:185f93ebccffebc8baf8302c869fac70dd5dd78694f3b875d03a31b03b062cdb", size = 59780, upload-time = "2025-08-20T11:55:06.325Z" }, + { url = "https://files.pythonhosted.org/packages/b3/cc/46b124c2697ca2da7c65c4931ed3cb670646978157aa57a7a60f741c530f/ujson-5.11.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d06e87eded62ff0e5f5178c916337d2262fdbc03b31688142a3433eabb6511db", size = 57307, upload-time = "2025-08-20T11:55:07.493Z" }, + { url = "https://files.pythonhosted.org/packages/39/eb/20dd1282bc85dede2f1c62c45b4040bc4c389c80a05983515ab99771bca7/ujson-5.11.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:181fb5b15703a8b9370b25345d2a1fd1359f0f18776b3643d24e13ed9c036d4c", size = 1036369, upload-time = "2025-08-20T11:55:09.192Z" }, + { url = "https://files.pythonhosted.org/packages/64/a2/80072439065d493e3a4b1fbeec991724419a1b4c232e2d1147d257cac193/ujson-5.11.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a4df61a6df0a4a8eb5b9b1ffd673429811f50b235539dac586bb7e9e91994138", size = 1195738, upload-time = "2025-08-20T11:55:11.402Z" }, + { url = "https://files.pythonhosted.org/packages/5d/7e/d77f9e9c039d58299c350c978e086a804d1fceae4fd4a1cc6e8d0133f838/ujson-5.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6eff24e1abd79e0ec6d7eae651dd675ddbc41f9e43e29ef81e16b421da896915", size = 1088718, upload-time = "2025-08-20T11:55:13.297Z" }, + { url = "https://files.pythonhosted.org/packages/da/ea/80346b826349d60ca4d612a47cdf3533694e49b45e9d1c07071bb867a184/ujson-5.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d7c46cb0fe5e7056b9acb748a4c35aa1b428025853032540bb7e41f46767321f", size = 55248, upload-time = "2025-08-20T11:55:19.033Z" }, + { url = "https://files.pythonhosted.org/packages/57/df/b53e747562c89515e18156513cc7c8ced2e5e3fd6c654acaa8752ffd7cd9/ujson-5.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8951bb7a505ab2a700e26f691bdfacf395bc7e3111e3416d325b513eea03a58", size = 53156, upload-time = "2025-08-20T11:55:20.174Z" }, + { url = "https://files.pythonhosted.org/packages/41/b8/ab67ec8c01b8a3721fd13e5cb9d85ab2a6066a3a5e9148d661a6870d6293/ujson-5.11.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:952c0be400229940248c0f5356514123d428cba1946af6fa2bbd7503395fef26", size = 57657, upload-time = "2025-08-20T11:55:21.296Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c7/fb84f27cd80a2c7e2d3c6012367aecade0da936790429801803fa8d4bffc/ujson-5.11.0-cp311-cp311-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:94fcae844f1e302f6f8095c5d1c45a2f0bfb928cccf9f1b99e3ace634b980a2a", size = 59779, upload-time = "2025-08-20T11:55:22.772Z" }, + { url = "https://files.pythonhosted.org/packages/5d/7c/48706f7c1e917ecb97ddcfb7b1d756040b86ed38290e28579d63bd3fcc48/ujson-5.11.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7e0ec1646db172beb8d3df4c32a9d78015e671d2000af548252769e33079d9a6", size = 57284, upload-time = "2025-08-20T11:55:24.01Z" }, + { url = "https://files.pythonhosted.org/packages/ec/ce/48877c6eb4afddfd6bd1db6be34456538c07ca2d6ed233d3f6c6efc2efe8/ujson-5.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:da473b23e3a54448b008d33f742bcd6d5fb2a897e42d1fc6e7bf306ea5d18b1b", size = 1036395, upload-time = "2025-08-20T11:55:25.725Z" }, + { url = "https://files.pythonhosted.org/packages/8b/7a/2c20dc97ad70cd7c31ad0596ba8e2cf8794d77191ba4d1e0bded69865477/ujson-5.11.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:aa6b3d4f1c0d3f82930f4cbd7fe46d905a4a9205a7c13279789c1263faf06dba", size = 1195731, upload-time = "2025-08-20T11:55:27.915Z" }, + { url = "https://files.pythonhosted.org/packages/15/f5/ca454f2f6a2c840394b6f162fff2801450803f4ff56c7af8ce37640b8a2a/ujson-5.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4843f3ab4fe1cc596bb7e02228ef4c25d35b4bb0809d6a260852a4bfcab37ba3", size = 1088710, upload-time = "2025-08-20T11:55:29.426Z" }, + { url = "https://files.pythonhosted.org/packages/b9/ef/a9cb1fce38f699123ff012161599fb9f2ff3f8d482b4b18c43a2dc35073f/ujson-5.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7895f0d2d53bd6aea11743bd56e3cb82d729980636cd0ed9b89418bf66591702", size = 55434, upload-time = "2025-08-20T11:55:34.987Z" }, + { url = "https://files.pythonhosted.org/packages/b1/05/dba51a00eb30bd947791b173766cbed3492269c150a7771d2750000c965f/ujson-5.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12b5e7e22a1fe01058000d1b317d3b65cc3daf61bd2ea7a2b76721fe160fa74d", size = 53190, upload-time = "2025-08-20T11:55:36.384Z" }, + { url = "https://files.pythonhosted.org/packages/03/3c/fd11a224f73fbffa299fb9644e425f38b38b30231f7923a088dd513aabb4/ujson-5.11.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0180a480a7d099082501cad1fe85252e4d4bf926b40960fb3d9e87a3a6fbbc80", size = 57600, upload-time = "2025-08-20T11:55:37.692Z" }, + { url = "https://files.pythonhosted.org/packages/55/b9/405103cae24899df688a3431c776e00528bd4799e7d68820e7ebcf824f92/ujson-5.11.0-cp312-cp312-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:fa79fdb47701942c2132a9dd2297a1a85941d966d8c87bfd9e29b0cf423f26cc", size = 59791, upload-time = "2025-08-20T11:55:38.877Z" }, + { url = "https://files.pythonhosted.org/packages/17/7b/2dcbc2bbfdbf68f2368fb21ab0f6735e872290bb604c75f6e06b81edcb3f/ujson-5.11.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8254e858437c00f17cb72e7a644fc42dad0ebb21ea981b71df6e84b1072aaa7c", size = 57356, upload-time = "2025-08-20T11:55:40.036Z" }, + { url = "https://files.pythonhosted.org/packages/d1/71/fea2ca18986a366c750767b694430d5ded6b20b6985fddca72f74af38a4c/ujson-5.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1aa8a2ab482f09f6c10fba37112af5f957689a79ea598399c85009f2f29898b5", size = 1036313, upload-time = "2025-08-20T11:55:41.408Z" }, + { url = "https://files.pythonhosted.org/packages/a3/bb/d4220bd7532eac6288d8115db51710fa2d7d271250797b0bfba9f1e755af/ujson-5.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a638425d3c6eed0318df663df44480f4a40dc87cc7c6da44d221418312f6413b", size = 1195782, upload-time = "2025-08-20T11:55:43.357Z" }, + { url = "https://files.pythonhosted.org/packages/80/47/226e540aa38878ce1194454385701d82df538ccb5ff8db2cf1641dde849a/ujson-5.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7e3cff632c1d78023b15f7e3a81c3745cd3f94c044d1e8fa8efbd6b161997bbc", size = 1088817, upload-time = "2025-08-20T11:55:45.262Z" }, + { url = "https://files.pythonhosted.org/packages/1c/ec/2de9dd371d52c377abc05d2b725645326c4562fc87296a8907c7bcdf2db7/ujson-5.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:109f59885041b14ee9569bf0bb3f98579c3fa0652317b355669939e5fc5ede53", size = 55435, upload-time = "2025-08-20T11:55:50.243Z" }, + { url = "https://files.pythonhosted.org/packages/5b/a4/f611f816eac3a581d8a4372f6967c3ed41eddbae4008d1d77f223f1a4e0a/ujson-5.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a31c6b8004438e8c20fc55ac1c0e07dad42941db24176fe9acf2815971f8e752", size = 53193, upload-time = "2025-08-20T11:55:51.373Z" }, + { url = "https://files.pythonhosted.org/packages/e9/c5/c161940967184de96f5cbbbcce45b562a4bf851d60f4c677704b1770136d/ujson-5.11.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78c684fb21255b9b90320ba7e199780f653e03f6c2528663768965f4126a5b50", size = 57603, upload-time = "2025-08-20T11:55:52.583Z" }, + { url = "https://files.pythonhosted.org/packages/2b/d6/c7b2444238f5b2e2d0e3dab300b9ddc3606e4b1f0e4bed5a48157cebc792/ujson-5.11.0-cp313-cp313-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:4c9f5d6a27d035dd90a146f7761c2272cf7103de5127c9ab9c4cd39ea61e878a", size = 59794, upload-time = "2025-08-20T11:55:53.69Z" }, + { url = "https://files.pythonhosted.org/packages/fe/a3/292551f936d3d02d9af148f53e1bc04306b00a7cf1fcbb86fa0d1c887242/ujson-5.11.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:837da4d27fed5fdc1b630bd18f519744b23a0b5ada1bbde1a36ba463f2900c03", size = 57363, upload-time = "2025-08-20T11:55:54.843Z" }, + { url = "https://files.pythonhosted.org/packages/90/a6/82cfa70448831b1a9e73f882225980b5c689bf539ec6400b31656a60ea46/ujson-5.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:787aff4a84da301b7f3bac09bc696e2e5670df829c6f8ecf39916b4e7e24e701", size = 1036311, upload-time = "2025-08-20T11:55:56.197Z" }, + { url = "https://files.pythonhosted.org/packages/84/5c/96e2266be50f21e9b27acaee8ca8f23ea0b85cb998c33d4f53147687839b/ujson-5.11.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6dd703c3e86dc6f7044c5ac0b3ae079ed96bf297974598116aa5fb7f655c3a60", size = 1195783, upload-time = "2025-08-20T11:55:58.081Z" }, + { url = "https://files.pythonhosted.org/packages/8d/20/78abe3d808cf3bb3e76f71fca46cd208317bf461c905d79f0d26b9df20f1/ujson-5.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3772e4fe6b0c1e025ba3c50841a0ca4786825a4894c8411bf8d3afe3a8061328", size = 1088822, upload-time = "2025-08-20T11:55:59.469Z" }, + { url = "https://files.pythonhosted.org/packages/28/08/4518146f4984d112764b1dfa6fb7bad691c44a401adadaa5e23ccd930053/ujson-5.11.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:65724738c73645db88f70ba1f2e6fb678f913281804d5da2fd02c8c5839af302", size = 55462, upload-time = "2025-08-20T11:56:04.873Z" }, + { url = "https://files.pythonhosted.org/packages/29/37/2107b9a62168867a692654d8766b81bd2fd1e1ba13e2ec90555861e02b0c/ujson-5.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:29113c003ca33ab71b1b480bde952fbab2a0b6b03a4ee4c3d71687cdcbd1a29d", size = 53246, upload-time = "2025-08-20T11:56:06.054Z" }, + { url = "https://files.pythonhosted.org/packages/9b/f8/25583c70f83788edbe3ca62ce6c1b79eff465d78dec5eb2b2b56b3e98b33/ujson-5.11.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c44c703842024d796b4c78542a6fcd5c3cb948b9fc2a73ee65b9c86a22ee3638", size = 57631, upload-time = "2025-08-20T11:56:07.374Z" }, + { url = "https://files.pythonhosted.org/packages/ed/ca/19b3a632933a09d696f10dc1b0dfa1d692e65ad507d12340116ce4f67967/ujson-5.11.0-cp314-cp314-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:e750c436fb90edf85585f5c62a35b35082502383840962c6983403d1bd96a02c", size = 59877, upload-time = "2025-08-20T11:56:08.534Z" }, + { url = "https://files.pythonhosted.org/packages/55/7a/4572af5324ad4b2bfdd2321e898a527050290147b4ea337a79a0e4e87ec7/ujson-5.11.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f278b31a7c52eb0947b2db55a5133fbc46b6f0ef49972cd1a80843b72e135aba", size = 57363, upload-time = "2025-08-20T11:56:09.758Z" }, + { url = "https://files.pythonhosted.org/packages/7b/71/a2b8c19cf4e1efe53cf439cdf7198ac60ae15471d2f1040b490c1f0f831f/ujson-5.11.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ab2cb8351d976e788669c8281465d44d4e94413718af497b4e7342d7b2f78018", size = 1036394, upload-time = "2025-08-20T11:56:11.168Z" }, + { url = "https://files.pythonhosted.org/packages/7a/3e/7b98668cba3bb3735929c31b999b374ebc02c19dfa98dfebaeeb5c8597ca/ujson-5.11.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:090b4d11b380ae25453100b722d0609d5051ffe98f80ec52853ccf8249dfd840", size = 1195837, upload-time = "2025-08-20T11:56:12.6Z" }, + { url = "https://files.pythonhosted.org/packages/a1/ea/8870f208c20b43571a5c409ebb2fe9b9dba5f494e9e60f9314ac01ea8f78/ujson-5.11.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:80017e870d882d5517d28995b62e4e518a894f932f1e242cbc802a2fd64d365c", size = 1088837, upload-time = "2025-08-20T11:56:14.15Z" }, + { url = "https://files.pythonhosted.org/packages/6e/cd/e9809b064a89fe5c4184649adeb13c1b98652db3f8518980b04227358574/ujson-5.11.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:de6e88f62796372fba1de973c11138f197d3e0e1d80bcb2b8aae1e826096d433", size = 55759, upload-time = "2025-08-20T11:56:18.882Z" }, + { url = "https://files.pythonhosted.org/packages/1b/be/ae26a6321179ebbb3a2e2685b9007c71bcda41ad7a77bbbe164005e956fc/ujson-5.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:49e56ef8066f11b80d620985ae36869a3ff7e4b74c3b6129182ec5d1df0255f3", size = 53634, upload-time = "2025-08-20T11:56:20.012Z" }, + { url = "https://files.pythonhosted.org/packages/ae/e9/fb4a220ee6939db099f4cfeeae796ecb91e7584ad4d445d4ca7f994a9135/ujson-5.11.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1a325fd2c3a056cf6c8e023f74a0c478dd282a93141356ae7f16d5309f5ff823", size = 58547, upload-time = "2025-08-20T11:56:21.175Z" }, + { url = "https://files.pythonhosted.org/packages/bd/f8/fc4b952b8f5fea09ea3397a0bd0ad019e474b204cabcb947cead5d4d1ffc/ujson-5.11.0-cp314-cp314t-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:a0af6574fc1d9d53f4ff371f58c96673e6d988ed2b5bf666a6143c782fa007e9", size = 60489, upload-time = "2025-08-20T11:56:22.342Z" }, + { url = "https://files.pythonhosted.org/packages/2e/e5/af5491dfda4f8b77e24cf3da68ee0d1552f99a13e5c622f4cef1380925c3/ujson-5.11.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10f29e71ecf4ecd93a6610bd8efa8e7b6467454a363c3d6416db65de883eb076", size = 58035, upload-time = "2025-08-20T11:56:23.92Z" }, + { url = "https://files.pythonhosted.org/packages/c4/09/0945349dd41f25cc8c38d78ace49f14c5052c5bbb7257d2f466fa7bdb533/ujson-5.11.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1a0a9b76a89827a592656fe12e000cf4f12da9692f51a841a4a07aa4c7ecc41c", size = 1037212, upload-time = "2025-08-20T11:56:25.274Z" }, + { url = "https://files.pythonhosted.org/packages/49/44/8e04496acb3d5a1cbee3a54828d9652f67a37523efa3d3b18a347339680a/ujson-5.11.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b16930f6a0753cdc7d637b33b4e8f10d5e351e1fb83872ba6375f1e87be39746", size = 1196500, upload-time = "2025-08-20T11:56:27.517Z" }, + { url = "https://files.pythonhosted.org/packages/64/ae/4bc825860d679a0f208a19af2f39206dfd804ace2403330fdc3170334a2f/ujson-5.11.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:04c41afc195fd477a59db3a84d5b83a871bd648ef371cf8c6f43072d89144eef", size = 1089487, upload-time = "2025-08-20T11:56:29.07Z" }, + { url = "https://files.pythonhosted.org/packages/39/bf/c6f59cdf74ce70bd937b97c31c42fd04a5ed1a9222d0197e77e4bd899841/ujson-5.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:65f3c279f4ed4bf9131b11972040200c66ae040368abdbb21596bf1564899694", size = 55283, upload-time = "2025-08-20T11:56:33.947Z" }, + { url = "https://files.pythonhosted.org/packages/8d/c1/a52d55638c0c644b8a63059f95ad5ffcb4ad8f60d8bc3e8680f78e77cc75/ujson-5.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:99c49400572cd77050894e16864a335225191fd72a818ea6423ae1a06467beac", size = 53168, upload-time = "2025-08-20T11:56:35.141Z" }, + { url = "https://files.pythonhosted.org/packages/75/6c/e64e19a01d59c8187d01ffc752ee3792a09f5edaaac2a0402de004459dd7/ujson-5.11.0-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0654a2691fc252c3c525e3d034bb27b8a7546c9d3eb33cd29ce6c9feda361a6a", size = 57809, upload-time = "2025-08-20T11:56:36.293Z" }, + { url = "https://files.pythonhosted.org/packages/9f/36/910117b7a8a1c188396f6194ca7bc8fd75e376d8f7e3cf5eb6219fc8b09d/ujson-5.11.0-cp39-cp39-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:6b6ec7e7321d7fc19abdda3ad809baef935f49673951a8bab486aea975007e02", size = 59797, upload-time = "2025-08-20T11:56:37.746Z" }, + { url = "https://files.pythonhosted.org/packages/c7/17/bcc85d282ee2f4cdef5f577e0a43533eedcae29cc6405edf8c62a7a50368/ujson-5.11.0-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f62b9976fabbcde3ab6e413f4ec2ff017749819a0786d84d7510171109f2d53c", size = 57378, upload-time = "2025-08-20T11:56:39.123Z" }, + { url = "https://files.pythonhosted.org/packages/ef/39/120bb76441bf835f3c3f42db9c206f31ba875711637a52a8209949ab04b0/ujson-5.11.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7f1a27ab91083b4770e160d17f61b407f587548f2c2b5fbf19f94794c495594a", size = 1036515, upload-time = "2025-08-20T11:56:40.848Z" }, + { url = "https://files.pythonhosted.org/packages/b6/ae/fe1b4ff6388f681f6710e9494656957725b1e73ae50421ec04567df9fb75/ujson-5.11.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ecd6ff8a3b5a90c292c2396c2d63c687fd0ecdf17de390d852524393cd9ed052", size = 1195753, upload-time = "2025-08-20T11:56:42.341Z" }, + { url = "https://files.pythonhosted.org/packages/92/20/005b93f2cf846ae50b46812fcf24bbdd127521197e5f1e1a82e3b3e730a1/ujson-5.11.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9aacbeb23fdbc4b256a7d12e0beb9063a1ba5d9e0dbb2cfe16357c98b4334596", size = 1088844, upload-time = "2025-08-20T11:56:43.777Z" }, + { url = "https://files.pythonhosted.org/packages/50/17/30275aa2933430d8c0c4ead951cc4fdb922f575a349aa0b48a6f35449e97/ujson-5.11.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:abae0fb58cc820092a0e9e8ba0051ac4583958495bfa5262a12f628249e3b362", size = 51206, upload-time = "2025-08-20T11:56:48.797Z" }, + { url = "https://files.pythonhosted.org/packages/c3/15/42b3924258eac2551f8f33fa4e35da20a06a53857ccf3d4deb5e5d7c0b6c/ujson-5.11.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:fac6c0649d6b7c3682a0a6e18d3de6857977378dce8d419f57a0b20e3d775b39", size = 48907, upload-time = "2025-08-20T11:56:50.136Z" }, + { url = "https://files.pythonhosted.org/packages/94/7e/0519ff7955aba581d1fe1fb1ca0e452471250455d182f686db5ac9e46119/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b42c115c7c6012506e8168315150d1e3f76e7ba0f4f95616f4ee599a1372bbc", size = 50319, upload-time = "2025-08-20T11:56:51.63Z" }, + { url = "https://files.pythonhosted.org/packages/74/cf/209d90506b7d6c5873f82c5a226d7aad1a1da153364e9ebf61eff0740c33/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_i686.manylinux_2_28_i686.whl", hash = "sha256:86baf341d90b566d61a394869ce77188cc8668f76d7bb2c311d77a00f4bdf844", size = 56584, upload-time = "2025-08-20T11:56:52.89Z" }, + { url = "https://files.pythonhosted.org/packages/e9/97/bd939bb76943cb0e1d2b692d7e68629f51c711ef60425fa5bb6968037ecd/ujson-5.11.0-pp311-pypy311_pp73-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4598bf3965fc1a936bd84034312bcbe00ba87880ef1ee33e33c1e88f2c398b49", size = 51588, upload-time = "2025-08-20T11:56:54.054Z" }, +] + +[[package]] +name = "urllib3" +version = "1.26.20" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/e4/e8/6ff5e6bc22095cfc59b6ea711b687e2b7ed4bdb373f7eeec370a97d7392f/urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32", size = 307380, upload-time = "2024-08-29T15:43:11.37Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/cf/8435d5a7159e2a9c83a95896ed596f68cf798005fe107cc655b5c5c14704/urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e", size = 144225, upload-time = "2024-08-29T15:43:08.921Z" }, +] + [[package]] name = "urllib3" version = "2.5.0" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.14'", + "python_full_version == '3.13.*'", + "python_full_version == '3.12.*'", + "python_full_version == '3.11.*'", + "python_full_version == '3.10.*'", +] sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, @@ -5629,6 +5959,44 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d2/e2/dc81b1bd1dcfe91735810265e9d26bc8ec5da45b4c0f6237e286819194c3/uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a", size = 66406, upload-time = "2025-06-28T16:15:44.816Z" }, ] +[[package]] +name = "uvloop" +version = "0.21.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3", size = 2492741, upload-time = "2024-10-14T23:38:35.489Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/76/44a55515e8c9505aa1420aebacf4dd82552e5e15691654894e90d0bd051a/uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f", size = 1442019, upload-time = "2024-10-14T23:37:20.068Z" }, + { url = "https://files.pythonhosted.org/packages/35/5a/62d5800358a78cc25c8a6c72ef8b10851bdb8cca22e14d9c74167b7f86da/uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d", size = 801898, upload-time = "2024-10-14T23:37:22.663Z" }, + { url = "https://files.pythonhosted.org/packages/f3/96/63695e0ebd7da6c741ccd4489b5947394435e198a1382349c17b1146bb97/uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26", size = 3827735, upload-time = "2024-10-14T23:37:25.129Z" }, + { url = "https://files.pythonhosted.org/packages/61/e0/f0f8ec84979068ffae132c58c79af1de9cceeb664076beea86d941af1a30/uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb", size = 3825126, upload-time = "2024-10-14T23:37:27.59Z" }, + { url = "https://files.pythonhosted.org/packages/bf/fe/5e94a977d058a54a19df95f12f7161ab6e323ad49f4dabc28822eb2df7ea/uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f", size = 3705789, upload-time = "2024-10-14T23:37:29.385Z" }, + { url = "https://files.pythonhosted.org/packages/26/dd/c7179618e46092a77e036650c1f056041a028a35c4d76945089fcfc38af8/uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c", size = 3800523, upload-time = "2024-10-14T23:37:32.048Z" }, + { url = "https://files.pythonhosted.org/packages/57/a7/4cf0334105c1160dd6819f3297f8700fda7fc30ab4f61fbf3e725acbc7cc/uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8", size = 1447410, upload-time = "2024-10-14T23:37:33.612Z" }, + { url = "https://files.pythonhosted.org/packages/8c/7c/1517b0bbc2dbe784b563d6ab54f2ef88c890fdad77232c98ed490aa07132/uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0", size = 805476, upload-time = "2024-10-14T23:37:36.11Z" }, + { url = "https://files.pythonhosted.org/packages/ee/ea/0bfae1aceb82a503f358d8d2fa126ca9dbdb2ba9c7866974faec1cb5875c/uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e", size = 3960855, upload-time = "2024-10-14T23:37:37.683Z" }, + { url = "https://files.pythonhosted.org/packages/8a/ca/0864176a649838b838f36d44bf31c451597ab363b60dc9e09c9630619d41/uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb", size = 3973185, upload-time = "2024-10-14T23:37:40.226Z" }, + { url = "https://files.pythonhosted.org/packages/30/bf/08ad29979a936d63787ba47a540de2132169f140d54aa25bc8c3df3e67f4/uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6", size = 3820256, upload-time = "2024-10-14T23:37:42.839Z" }, + { url = "https://files.pythonhosted.org/packages/da/e2/5cf6ef37e3daf2f06e651aae5ea108ad30df3cb269102678b61ebf1fdf42/uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d", size = 3937323, upload-time = "2024-10-14T23:37:45.337Z" }, + { url = "https://files.pythonhosted.org/packages/8c/4c/03f93178830dc7ce8b4cdee1d36770d2f5ebb6f3d37d354e061eefc73545/uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c", size = 1471284, upload-time = "2024-10-14T23:37:47.833Z" }, + { url = "https://files.pythonhosted.org/packages/43/3e/92c03f4d05e50f09251bd8b2b2b584a2a7f8fe600008bcc4523337abe676/uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2", size = 821349, upload-time = "2024-10-14T23:37:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/a6/ef/a02ec5da49909dbbfb1fd205a9a1ac4e88ea92dcae885e7c961847cd51e2/uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d", size = 4580089, upload-time = "2024-10-14T23:37:51.703Z" }, + { url = "https://files.pythonhosted.org/packages/06/a7/b4e6a19925c900be9f98bec0a75e6e8f79bb53bdeb891916609ab3958967/uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc", size = 4693770, upload-time = "2024-10-14T23:37:54.122Z" }, + { url = "https://files.pythonhosted.org/packages/ce/0c/f07435a18a4b94ce6bd0677d8319cd3de61f3a9eeb1e5f8ab4e8b5edfcb3/uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb", size = 4451321, upload-time = "2024-10-14T23:37:55.766Z" }, + { url = "https://files.pythonhosted.org/packages/8f/eb/f7032be105877bcf924709c97b1bf3b90255b4ec251f9340cef912559f28/uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f", size = 4659022, upload-time = "2024-10-14T23:37:58.195Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8d/2cbef610ca21539f0f36e2b34da49302029e7c9f09acef0b1c3b5839412b/uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281", size = 1468123, upload-time = "2024-10-14T23:38:00.688Z" }, + { url = "https://files.pythonhosted.org/packages/93/0d/b0038d5a469f94ed8f2b2fce2434a18396d8fbfb5da85a0a9781ebbdec14/uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af", size = 819325, upload-time = "2024-10-14T23:38:02.309Z" }, + { url = "https://files.pythonhosted.org/packages/50/94/0a687f39e78c4c1e02e3272c6b2ccdb4e0085fda3b8352fecd0410ccf915/uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6", size = 4582806, upload-time = "2024-10-14T23:38:04.711Z" }, + { url = "https://files.pythonhosted.org/packages/d2/19/f5b78616566ea68edd42aacaf645adbf71fbd83fc52281fba555dc27e3f1/uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816", size = 4701068, upload-time = "2024-10-14T23:38:06.385Z" }, + { url = "https://files.pythonhosted.org/packages/47/57/66f061ee118f413cd22a656de622925097170b9380b30091b78ea0c6ea75/uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc", size = 4454428, upload-time = "2024-10-14T23:38:08.416Z" }, + { url = "https://files.pythonhosted.org/packages/63/9a/0962b05b308494e3202d3f794a6e85abe471fe3cafdbcf95c2e8c713aabd/uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553", size = 4660018, upload-time = "2024-10-14T23:38:10.888Z" }, + { url = "https://files.pythonhosted.org/packages/3c/a4/646a9d0edff7cde25fc1734695d3dfcee0501140dd0e723e4df3f0a50acb/uvloop-0.21.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c097078b8031190c934ed0ebfee8cc5f9ba9642e6eb88322b9958b649750f72b", size = 1439646, upload-time = "2024-10-14T23:38:24.656Z" }, + { url = "https://files.pythonhosted.org/packages/01/2e/e128c66106af9728f86ebfeeb52af27ecd3cb09336f3e2f3e06053707a15/uvloop-0.21.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:46923b0b5ee7fc0020bef24afe7836cb068f5050ca04caf6b487c513dc1a20b2", size = 800931, upload-time = "2024-10-14T23:38:26.087Z" }, + { url = "https://files.pythonhosted.org/packages/2d/1a/9fbc2b1543d0df11f7aed1632f64bdf5ecc4053cf98cdc9edb91a65494f9/uvloop-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53e420a3afe22cdcf2a0f4846e377d16e718bc70103d7088a4f7623567ba5fb0", size = 3829660, upload-time = "2024-10-14T23:38:27.905Z" }, + { url = "https://files.pythonhosted.org/packages/b8/c0/392e235e4100ae3b95b5c6dac77f82b529d2760942b1e7e0981e5d8e895d/uvloop-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb67cdbc0e483da00af0b2c3cdad4b7c61ceb1ee0f33fe00e09c81e3a6cb75", size = 3827185, upload-time = "2024-10-14T23:38:29.458Z" }, + { url = "https://files.pythonhosted.org/packages/e1/24/a5da6aba58f99aed5255eca87d58d1760853e8302d390820cc29058408e3/uvloop-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:221f4f2a1f46032b403bf3be628011caf75428ee3cc204a22addf96f586b19fd", size = 3705833, upload-time = "2024-10-14T23:38:31.155Z" }, + { url = "https://files.pythonhosted.org/packages/1a/5c/6ba221bb60f1e6474474102e17e38612ec7a06dc320e22b687ab563d877f/uvloop-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2d1f581393673ce119355d56da84fe1dd9d2bb8b3d13ce792524e1607139feff", size = 3804696, upload-time = "2024-10-14T23:38:33.633Z" }, +] + [[package]] name = "virtualenv" version = "20.34.0"