diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 79cedf577..9b65d4e55 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,7 +17,7 @@ repos: - id: mixed-line-ending - id: trailing-whitespace - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: "v0.12.10" + rev: "v0.12.11" hooks: - id: ruff args: ["--fix"] diff --git a/pyproject.toml b/pyproject.toml index 0a2598089..9b64f15b8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,7 +13,7 @@ maintainers = [{ name = "Litestar Developers", email = "hello@litestar.dev" }] name = "sqlspec" readme = "README.md" requires-python = ">=3.9, <4.0" -version = "0.21.1" +version = "0.22.0" [project.urls] Discord = "https://discord.gg/litestar" @@ -83,6 +83,7 @@ doc = [ ] extras = [ "adbc_driver_manager", + "fsspec[s3]", "pgvector", "pyarrow", "polars", @@ -341,6 +342,7 @@ module = [ "sqlglot.*", "pgvector", "pgvector.*", + "minio", ] [[tool.mypy.overrides]] diff --git a/sqlspec/base.py b/sqlspec/base.py index 9a5b89774..d110ea453 100644 --- a/sqlspec/base.py +++ b/sqlspec/base.py @@ -64,7 +64,7 @@ def _cleanup_sync_pools(self) -> None: config.close_pool() cleaned_count += 1 except Exception as e: - logger.warning("Failed to clean up sync pool for config %s: %s", config_type.__name__, e) + logger.debug("Failed to clean up sync pool for config %s: %s", config_type.__name__, e) if cleaned_count > 0: logger.debug("Sync pool cleanup completed. Cleaned %d pools.", cleaned_count) @@ -87,14 +87,14 @@ async def close_all_pools(self) -> None: else: sync_configs.append((config_type, config)) except Exception as e: - logger.warning("Failed to prepare cleanup for config %s: %s", config_type.__name__, e) + logger.debug("Failed to prepare cleanup for config %s: %s", config_type.__name__, e) if cleanup_tasks: try: await asyncio.gather(*cleanup_tasks, return_exceptions=True) logger.debug("Async pool cleanup completed. Cleaned %d pools.", len(cleanup_tasks)) except Exception as e: - logger.warning("Failed to complete async pool cleanup: %s", e) + logger.debug("Failed to complete async pool cleanup: %s", e) for _config_type, config in sync_configs: config.close_pool() @@ -129,7 +129,7 @@ def add_config(self, config: "Union[SyncConfigT, AsyncConfigT]") -> "type[Union[ """ config_type = type(config) if config_type in self._configs: - logger.warning("Configuration for %s already exists. Overwriting.", config_type.__name__) + logger.debug("Configuration for %s already exists. 
Overwriting.", config_type.__name__) self._configs[config_type] = config return config_type diff --git a/sqlspec/loader.py b/sqlspec/loader.py index 8dcec7067..0d837f039 100644 --- a/sqlspec/loader.py +++ b/sqlspec/loader.py @@ -10,18 +10,15 @@ from datetime import datetime, timezone from pathlib import Path from typing import TYPE_CHECKING, Any, Final, Optional, Union +from urllib.parse import unquote, urlparse from sqlspec.core.cache import CacheKey, get_cache_config, get_default_cache from sqlspec.core.statement import SQL -from sqlspec.exceptions import ( - MissingDependencyError, - SQLFileNotFoundError, - SQLFileParseError, - StorageOperationFailedError, -) +from sqlspec.exceptions import SQLFileNotFoundError, SQLFileParseError, StorageOperationFailedError from sqlspec.storage.registry import storage_registry as default_storage_registry from sqlspec.utils.correlation import CorrelationContext from sqlspec.utils.logging import get_logger +from sqlspec.utils.text import slugify if TYPE_CHECKING: from sqlspec.storage.registry import StorageRegistry @@ -54,13 +51,25 @@ def _normalize_query_name(name: str) -> str: """Normalize query name to be a valid Python identifier. + Convert hyphens to underscores, preserve dots for namespacing, + and remove invalid characters. + Args: name: Raw query name from SQL file. Returns: Normalized query name suitable as Python identifier. """ - return TRIM_SPECIAL_CHARS.sub("", name).replace("-", "_") + # Handle namespace parts separately to preserve dots + parts = name.split(".") + normalized_parts = [] + + for part in parts: + # Use slugify with underscore separator and remove any remaining invalid chars + normalized_part = slugify(part, separator="_") + normalized_parts.append(normalized_part) + + return ".".join(normalized_parts) def _normalize_dialect(dialect: str) -> str: @@ -76,19 +85,6 @@ def _normalize_dialect(dialect: str) -> str: return DIALECT_ALIASES.get(normalized, normalized) -def _normalize_dialect_for_sqlglot(dialect: str) -> str: - """Normalize dialect name for SQLGlot compatibility. - - Args: - dialect: Dialect name from SQL file or parameter. - - Returns: - SQLGlot-compatible dialect name. - """ - normalized = dialect.lower().strip() - return DIALECT_ALIASES.get(normalized, normalized) - - class NamedStatement: """Represents a parsed SQL statement with metadata. @@ -218,8 +214,7 @@ def _calculate_file_checksum(self, path: Union[str, Path]) -> str: SQLFileParseError: If file cannot be read. """ try: - content = self._read_file_content(path) - return hashlib.md5(content.encode(), usedforsecurity=False).hexdigest() + return hashlib.md5(self._read_file_content(path).encode(), usedforsecurity=False).hexdigest() except Exception as e: raise SQLFileParseError(str(path), str(path), e) from e @@ -253,19 +248,22 @@ def _read_file_content(self, path: Union[str, Path]) -> str: SQLFileNotFoundError: If file does not exist. SQLFileParseError: If file cannot be read or parsed. 
""" - path_str = str(path) try: backend = self.storage_registry.get(path) + # For file:// URIs, extract just the filename for the backend call + if path_str.startswith("file://"): + parsed = urlparse(path_str) + file_path = unquote(parsed.path) + # Handle Windows paths (file:///C:/path) + if file_path and len(file_path) > 2 and file_path[2] == ":": # noqa: PLR2004 + file_path = file_path[1:] # Remove leading slash for Windows + filename = Path(file_path).name + return backend.read_text(filename, encoding=self.encoding) return backend.read_text(path_str, encoding=self.encoding) except KeyError as e: raise SQLFileNotFoundError(path_str) from e - except MissingDependencyError: - try: - return path.read_text(encoding=self.encoding) # type: ignore[union-attr] - except FileNotFoundError as e: - raise SQLFileNotFoundError(path_str) from e except StorageOperationFailedError as e: if "not found" in str(e).lower() or "no such file" in str(e).lower(): raise SQLFileNotFoundError(path_str) from e @@ -419,8 +417,7 @@ def _load_directory(self, dir_path: Path) -> int: for file_path in sql_files: relative_path = file_path.relative_to(dir_path) namespace_parts = relative_path.parent.parts - namespace = ".".join(namespace_parts) if namespace_parts else None - self._load_single_file(file_path, namespace) + self._load_single_file(file_path, ".".join(namespace_parts) if namespace_parts else None) return len(sql_files) def _load_single_file(self, file_path: Union[str, Path], namespace: Optional[str]) -> None: @@ -533,44 +530,6 @@ def add_named_sql(self, name: str, sql: str, dialect: "Optional[str]" = None) -> self._queries[normalized_name] = statement self._query_to_file[normalized_name] = "" - def get_sql(self, name: str) -> "SQL": - """Get a SQL object by statement name. - - Args: - name: Name of the statement (from -- name: in SQL file). - Hyphens in names are converted to underscores. - - Returns: - SQL object ready for execution. - - Raises: - SQLFileNotFoundError: If statement name not found. - """ - correlation_id = CorrelationContext.get() - - safe_name = _normalize_query_name(name) - - if safe_name not in self._queries: - available = ", ".join(sorted(self._queries.keys())) if self._queries else "none" - logger.error( - "Statement not found: %s", - name, - extra={ - "statement_name": name, - "safe_name": safe_name, - "available_statements": len(self._queries), - "correlation_id": correlation_id, - }, - ) - raise SQLFileNotFoundError(name, path=f"Statement '{name}' not found. Available statements: {available}") - - parsed_statement = self._queries[safe_name] - sqlglot_dialect = None - if parsed_statement.dialect: - sqlglot_dialect = _normalize_dialect_for_sqlglot(parsed_statement.dialect) - - return SQL(parsed_statement.sql, dialect=sqlglot_dialect) - def get_file(self, path: Union[str, Path]) -> "Optional[SQLFile]": """Get a loaded SQLFile object by path. @@ -659,3 +618,41 @@ def get_query_text(self, name: str) -> str: if safe_name not in self._queries: raise SQLFileNotFoundError(name) return self._queries[safe_name].sql + + def get_sql(self, name: str) -> "SQL": + """Get a SQL object by statement name. + + Args: + name: Name of the statement (from -- name: in SQL file). + Hyphens in names are converted to underscores. + + Returns: + SQL object ready for execution. + + Raises: + SQLFileNotFoundError: If statement name not found. 
+ """ + correlation_id = CorrelationContext.get() + + safe_name = _normalize_query_name(name) + + if safe_name not in self._queries: + available = ", ".join(sorted(self._queries.keys())) if self._queries else "none" + logger.error( + "Statement not found: %s", + name, + extra={ + "statement_name": name, + "safe_name": safe_name, + "available_statements": len(self._queries), + "correlation_id": correlation_id, + }, + ) + raise SQLFileNotFoundError(name, path=f"Statement '{name}' not found. Available statements: {available}") + + parsed_statement = self._queries[safe_name] + sqlglot_dialect = None + if parsed_statement.dialect: + sqlglot_dialect = _normalize_dialect(parsed_statement.dialect) + + return SQL(parsed_statement.sql, dialect=sqlglot_dialect) diff --git a/sqlspec/protocols.py b/sqlspec/protocols.py index f3d0b297e..7484a3dda 100644 --- a/sqlspec/protocols.py +++ b/sqlspec/protocols.py @@ -4,7 +4,7 @@ and runtime isinstance() checks. """ -from typing import TYPE_CHECKING, Any, ClassVar, Optional, Protocol, Union, runtime_checkable +from typing import TYPE_CHECKING, Any, Optional, Protocol, Union, runtime_checkable from typing_extensions import Self @@ -14,7 +14,6 @@ from sqlglot import exp - from sqlspec.storage.capabilities import StorageCapabilities from sqlspec.typing import ArrowRecordBatch, ArrowTable __all__ = ( @@ -194,9 +193,8 @@ class ObjectStoreItemProtocol(Protocol): class ObjectStoreProtocol(Protocol): """Protocol for object storage operations.""" - capabilities: ClassVar["StorageCapabilities"] - protocol: str + backend_type: str def __init__(self, uri: str, **kwargs: Any) -> None: return @@ -330,7 +328,7 @@ async def write_arrow_async(self, path: "Union[str, Path]", table: "ArrowTable", msg = "Async arrow writing not implemented" raise NotImplementedError(msg) - async def stream_arrow_async(self, pattern: str, **kwargs: Any) -> "AsyncIterator[ArrowRecordBatch]": + def stream_arrow_async(self, pattern: str, **kwargs: Any) -> "AsyncIterator[ArrowRecordBatch]": """Async stream Arrow record batches from matching objects.""" msg = "Async arrow streaming not implemented" raise NotImplementedError(msg) diff --git a/sqlspec/storage/__init__.py b/sqlspec/storage/__init__.py index e40325274..d2c405ae1 100644 --- a/sqlspec/storage/__init__.py +++ b/sqlspec/storage/__init__.py @@ -8,16 +8,6 @@ - Capability-based backend selection """ -from sqlspec.protocols import ObjectStoreProtocol -from sqlspec.storage.capabilities import HasStorageCapabilities, StorageCapabilities -from sqlspec.storage.registry import StorageRegistry +from sqlspec.storage.registry import StorageRegistry, storage_registry -storage_registry = StorageRegistry() - -__all__ = ( - "HasStorageCapabilities", - "ObjectStoreProtocol", - "StorageCapabilities", - "StorageRegistry", - "storage_registry", -) +__all__ = ("StorageRegistry", "storage_registry") diff --git a/sqlspec/storage/backends/__init__.py b/sqlspec/storage/backends/__init__.py index e69de29bb..17d48d27e 100644 --- a/sqlspec/storage/backends/__init__.py +++ b/sqlspec/storage/backends/__init__.py @@ -0,0 +1 @@ +"""Storage backends.""" diff --git a/sqlspec/storage/backends/fsspec.py b/sqlspec/storage/backends/fsspec.py index 1887f7a6e..949981786 100644 --- a/sqlspec/storage/backends/fsspec.py +++ b/sqlspec/storage/backends/fsspec.py @@ -1,18 +1,14 @@ import logging from pathlib import Path -from typing import TYPE_CHECKING, Any, ClassVar, Optional, Union +from typing import TYPE_CHECKING, Any, Optional, Union -from sqlspec.exceptions import 
MissingDependencyError, StorageOperationFailedError -from sqlspec.storage.backends.base import ObjectStoreBase -from sqlspec.storage.capabilities import StorageCapabilities +from sqlspec.exceptions import MissingDependencyError from sqlspec.typing import FSSPEC_INSTALLED, PYARROW_INSTALLED from sqlspec.utils.sync_tools import async_ if TYPE_CHECKING: from collections.abc import AsyncIterator, Iterator - from fsspec import AbstractFileSystem - from sqlspec.typing import ArrowRecordBatch, ArrowTable __all__ = ("FSSpecBackend",) @@ -56,40 +52,30 @@ async def __anext__(self) -> "ArrowRecordBatch": raise StopAsyncIteration -class FSSpecBackend(ObjectStoreBase): +class FSSpecBackend: """Storage backend using fsspec. - Implements the ObjectStoreProtocol using fsspec for various protocols + Implements ObjectStoreProtocol using fsspec for various protocols including HTTP, HTTPS, FTP, and cloud storage services. """ - _default_capabilities: ClassVar[StorageCapabilities] = StorageCapabilities( - supports_arrow=PYARROW_INSTALLED, - supports_streaming=PYARROW_INSTALLED, - supports_async=True, - supports_compression=True, - is_remote=True, - is_cloud_native=False, - ) - - def __init__(self, fs: "Union[str, AbstractFileSystem]", base_path: str = "") -> None: - if not FSSPEC_INSTALLED: - raise MissingDependencyError(package="fsspec", install_package="fsspec") + def __init__(self, uri: str, **kwargs: Any) -> None: + self._ensure_fsspec() + base_path = kwargs.pop("base_path", "") self.base_path = base_path.rstrip("/") if base_path else "" - if isinstance(fs, str): - import fsspec - - self.fs = fsspec.filesystem(fs.split("://")[0]) - self.protocol = fs.split("://")[0] - self._fs_uri = fs + if "://" in uri: + self.protocol = uri.split("://", maxsplit=1)[0] + self._fs_uri = uri else: - self.fs = fs - self.protocol = getattr(fs, "protocol", "unknown") - self._fs_uri = f"{self.protocol}://" + self.protocol = uri + self._fs_uri = f"{uri}://" + + import fsspec - self._instance_capabilities = self._detect_capabilities() + self.fs = fsspec.filesystem(self.protocol, **kwargs) + self.backend_type = "fsspec" super().__init__() @@ -99,11 +85,22 @@ def from_config(cls, config: "dict[str, Any]") -> "FSSpecBackend": fs_config = config.get("fs_config", {}) base_path = config.get("base_path", "") - import fsspec + uri = f"{protocol}://" + kwargs = dict(fs_config) + if base_path: + kwargs["base_path"] = base_path - fs_instance = fsspec.filesystem(protocol, **fs_config) + return cls(uri=uri, **kwargs) - return cls(fs=fs_instance, base_path=base_path) + def _ensure_fsspec(self) -> None: + """Ensure fsspec is available for operations.""" + if not FSSPEC_INSTALLED: + raise MissingDependencyError(package="fsspec", install_package="fsspec") + + def _ensure_pyarrow(self) -> None: + """Ensure PyArrow is available for Arrow operations.""" + if not PYARROW_INSTALLED: + raise MissingDependencyError(package="pyarrow", install_package="pyarrow") def _resolve_path(self, path: Union[str, Path]) -> str: """Resolve path relative to base_path.""" @@ -112,70 +109,38 @@ def _resolve_path(self, path: Union[str, Path]) -> str: clean_base = self.base_path.rstrip("/") clean_path = path_str.lstrip("/") return f"{clean_base}/{clean_path}" + if self.protocol == "s3" and "://" in self._fs_uri: + # For S3, we need to include the bucket from the URI + # Extract bucket and path from URI like s3://bucket/path + uri_parts = self._fs_uri.split("://", 1)[1] # Remove s3:// + if "/" in uri_parts: + # URI has bucket and base path + return 
f"{uri_parts.rstrip('/')}/{path_str.lstrip('/')}" + # URI has only bucket + return f"{uri_parts}/{path_str.lstrip('/')}" return path_str - def _detect_capabilities(self) -> StorageCapabilities: - """Detect capabilities based on filesystem protocol.""" - protocol = self.protocol.lower() - - if protocol in {"s3", "s3a", "s3n"}: - return StorageCapabilities.s3_compatible() - if protocol in {"gcs", "gs"}: - return StorageCapabilities.gcs() - if protocol in {"abfs", "az", "azure"}: - return StorageCapabilities.azure_blob() - if protocol in {"file", "local"}: - return StorageCapabilities.local_filesystem() - return StorageCapabilities( - supports_arrow=PYARROW_INSTALLED, - supports_streaming=PYARROW_INSTALLED, - supports_async=True, - supports_compression=True, - is_remote=True, - is_cloud_native=False, - ) - - @property - def capabilities(self) -> StorageCapabilities: - """Return capabilities based on detected protocol.""" - return getattr(self, "_instance_capabilities", self.__class__._default_capabilities) - - @classmethod - def has_capability(cls, capability: str) -> bool: - """Check if backend has a specific capability.""" - return getattr(cls._default_capabilities, capability, False) - - @classmethod - def get_capabilities(cls) -> StorageCapabilities: - """Get all capabilities for this backend.""" - return cls._default_capabilities - - @property - def backend_type(self) -> str: - return "fsspec" - @property def base_uri(self) -> str: return self._fs_uri def read_bytes(self, path: Union[str, Path], **kwargs: Any) -> bytes: """Read bytes from an object.""" - try: - resolved_path = self._resolve_path(path) - return self.fs.cat(resolved_path, **kwargs) # type: ignore[no-any-return] # pyright: ignore - except Exception as exc: - msg = f"Failed to read bytes from {path}" - raise StorageOperationFailedError(msg) from exc + resolved_path = self._resolve_path(path) + return self.fs.cat(resolved_path, **kwargs) # type: ignore[no-any-return] # pyright: ignore def write_bytes(self, path: Union[str, Path], data: bytes, **kwargs: Any) -> None: """Write bytes to an object.""" - try: - resolved_path = self._resolve_path(path) - with self.fs.open(resolved_path, mode="wb", **kwargs) as f: - f.write(data) # pyright: ignore - except Exception as exc: - msg = f"Failed to write bytes to {path}" - raise StorageOperationFailedError(msg) from exc + resolved_path = self._resolve_path(path) + + # Only create directories for local file systems, not for cloud storage + if self.protocol == "file": + parent_dir = str(Path(resolved_path).parent) + if parent_dir and not self.fs.exists(parent_dir): + self.fs.makedirs(parent_dir, exist_ok=True) + + with self.fs.open(resolved_path, mode="wb", **kwargs) as f: + f.write(data) # pyright: ignore def read_text(self, path: Union[str, Path], encoding: str = "utf-8", **kwargs: Any) -> str: """Read text from an object.""" @@ -193,87 +158,59 @@ def exists(self, path: Union[str, Path], **kwargs: Any) -> bool: def delete(self, path: Union[str, Path], **kwargs: Any) -> None: """Delete an object.""" - try: - resolved_path = self._resolve_path(path) - self.fs.rm(resolved_path, **kwargs) - except Exception as exc: - msg = f"Failed to delete {path}" - raise StorageOperationFailedError(msg) from exc + resolved_path = self._resolve_path(path) + self.fs.rm(resolved_path, **kwargs) def copy(self, source: Union[str, Path], destination: Union[str, Path], **kwargs: Any) -> None: """Copy an object.""" - try: - source_path = self._resolve_path(source) - dest_path = self._resolve_path(destination) - 
self.fs.copy(source_path, dest_path, **kwargs) - except Exception as exc: - msg = f"Failed to copy {source} to {destination}" - raise StorageOperationFailedError(msg) from exc + source_path = self._resolve_path(source) + dest_path = self._resolve_path(destination) + self.fs.copy(source_path, dest_path, **kwargs) def move(self, source: Union[str, Path], destination: Union[str, Path], **kwargs: Any) -> None: """Move an object.""" - try: - source_path = self._resolve_path(source) - dest_path = self._resolve_path(destination) - self.fs.mv(source_path, dest_path, **kwargs) - except Exception as exc: - msg = f"Failed to move {source} to {destination}" - raise StorageOperationFailedError(msg) from exc + source_path = self._resolve_path(source) + dest_path = self._resolve_path(destination) + self.fs.mv(source_path, dest_path, **kwargs) def read_arrow(self, path: Union[str, Path], **kwargs: Any) -> "ArrowTable": """Read an Arrow table from storage.""" if not PYARROW_INSTALLED: raise MissingDependencyError(package="pyarrow", install_package="pyarrow") - try: - import pyarrow.parquet as pq + import pyarrow.parquet as pq - resolved_path = self._resolve_path(path) - with self.fs.open(resolved_path, mode="rb", **kwargs) as f: - return pq.read_table(f) - except Exception as exc: - msg = f"Failed to read Arrow table from {path}" - raise StorageOperationFailedError(msg) from exc + resolved_path = self._resolve_path(path) + with self.fs.open(resolved_path, mode="rb", **kwargs) as f: + return pq.read_table(f) def write_arrow(self, path: Union[str, Path], table: "ArrowTable", **kwargs: Any) -> None: """Write an Arrow table to storage.""" if not PYARROW_INSTALLED: raise MissingDependencyError(package="pyarrow", install_package="pyarrow") - try: - import pyarrow.parquet as pq + import pyarrow.parquet as pq - resolved_path = self._resolve_path(path) - with self.fs.open(resolved_path, mode="wb") as f: - pq.write_table(table, f, **kwargs) # pyright: ignore - except Exception as exc: - msg = f"Failed to write Arrow table to {path}" - raise StorageOperationFailedError(msg) from exc + resolved_path = self._resolve_path(path) + with self.fs.open(resolved_path, mode="wb") as f: + pq.write_table(table, f, **kwargs) # pyright: ignore def list_objects(self, prefix: str = "", recursive: bool = True, **kwargs: Any) -> list[str]: """List objects with optional prefix.""" - try: - resolved_prefix = self._resolve_path(prefix) - if recursive: - return sorted(self.fs.find(resolved_prefix, **kwargs)) - return sorted(self.fs.ls(resolved_prefix, detail=False, **kwargs)) - except Exception as exc: - msg = f"Failed to list objects with prefix '{prefix}'" - raise StorageOperationFailedError(msg) from exc + resolved_prefix = self._resolve_path(prefix) + if recursive: + return sorted(self.fs.find(resolved_prefix, **kwargs)) + return sorted(self.fs.ls(resolved_prefix, detail=False, **kwargs)) def glob(self, pattern: str, **kwargs: Any) -> list[str]: """Find objects matching a glob pattern.""" - try: - resolved_pattern = self._resolve_path(pattern) - return sorted(self.fs.glob(resolved_pattern, **kwargs)) # pyright: ignore - except Exception as exc: - msg = f"Failed to glob with pattern '{pattern}'" - raise StorageOperationFailedError(msg) from exc + resolved_pattern = self._resolve_path(pattern) + return sorted(self.fs.glob(resolved_pattern, **kwargs)) # pyright: ignore - def is_object(self, path: str) -> bool: + def is_object(self, path: Union[str, Path]) -> bool: """Check if path points to an object.""" resolved_path = 
self._resolve_path(path) return self.fs.exists(resolved_path) and not self.fs.isdir(resolved_path) - def is_path(self, path: str) -> bool: + def is_path(self, path: Union[str, Path]) -> bool: """Check if path points to a prefix (directory-like).""" resolved_path = self._resolve_path(path) return self.fs.isdir(resolved_path) # type: ignore[no-any-return] @@ -294,9 +231,6 @@ def get_metadata(self, path: Union[str, Path], **kwargs: Any) -> dict[str, Any]: except FileNotFoundError: return {"path": self._resolve_path(path), "exists": False} - except Exception as exc: - msg = f"Failed to get metadata for {path}" - raise StorageOperationFailedError(msg) from exc return { "path": resolved_path, "exists": True, @@ -305,6 +239,11 @@ def get_metadata(self, path: Union[str, Path], **kwargs: Any) -> dict[str, Any]: "type": info.type, } + def sign(self, path: str, expires_in: int = 3600, for_upload: bool = False) -> str: + """Generate a signed URL for the file.""" + resolved_path = self._resolve_path(path) + return f"{self._fs_uri}{resolved_path}" + def _stream_file_batches(self, obj_path: Union[str, Path]) -> "Iterator[ArrowRecordBatch]": import pyarrow.parquet as pq @@ -313,10 +252,8 @@ def _stream_file_batches(self, obj_path: Union[str, Path]) -> "Iterator[ArrowRec yield from parquet_file.iter_batches() def stream_arrow(self, pattern: str, **kwargs: Any) -> "Iterator[ArrowRecordBatch]": - if not FSSPEC_INSTALLED: - raise MissingDependencyError(package="fsspec", install_package="fsspec") - if not PYARROW_INSTALLED: - raise MissingDependencyError(package="pyarrow", install_package="pyarrow") + self._ensure_fsspec() + self._ensure_pyarrow() for obj_path in self.glob(pattern, **kwargs): yield from self._stream_file_batches(obj_path) @@ -339,8 +276,7 @@ def stream_arrow_async(self, pattern: str, **kwargs: Any) -> "AsyncIterator[Arro Returns: AsyncIterator of Arrow record batches """ - if not PYARROW_INSTALLED: - raise MissingDependencyError(package="pyarrow", install_package="pyarrow") + self._ensure_pyarrow() return _ArrowStreamer(self, pattern, **kwargs) @@ -376,6 +312,10 @@ async def get_metadata_async(self, path: Union[str, Path], **kwargs: Any) -> dic """Get object metadata from storage asynchronously.""" return await async_(self.get_metadata)(path, **kwargs) + async def sign_async(self, path: str, expires_in: int = 3600, for_upload: bool = False) -> str: + """Generate a signed URL asynchronously.""" + return await async_(self.sign)(path, expires_in, for_upload) + async def read_arrow_async(self, path: Union[str, Path], **kwargs: Any) -> "ArrowTable": """Read Arrow table from storage asynchronously.""" return await async_(self.read_arrow)(path, **kwargs) diff --git a/sqlspec/storage/backends/local.py b/sqlspec/storage/backends/local.py new file mode 100644 index 000000000..173f053b6 --- /dev/null +++ b/sqlspec/storage/backends/local.py @@ -0,0 +1,310 @@ +"""Local file system storage backend. + +A simple, zero-dependency implementation for local file operations. +No external dependencies like fsspec or obstore required. 
+""" + +import shutil +from collections.abc import AsyncIterator, Iterator +from pathlib import Path +from typing import TYPE_CHECKING, Any, Optional, Union +from urllib.parse import unquote, urlparse + +from sqlspec.exceptions import MissingDependencyError +from sqlspec.typing import PYARROW_INSTALLED +from sqlspec.utils.sync_tools import async_ + +if TYPE_CHECKING: + import asyncio + + from sqlspec.typing import ArrowRecordBatch, ArrowTable + +__all__ = ("LocalStore",) + + +class LocalStore: + """Simple local file system storage backend. + + Provides file system operations without requiring fsspec or obstore. + Supports file:// URIs and regular file paths. + + Implements ObjectStoreProtocol for type safety. + """ + + __slots__ = ("_loop", "backend_type", "base_path", "protocol") + + def __init__(self, uri: str = "", **kwargs: Any) -> None: + """Initialize local storage backend. + + Args: + uri: File URI or path (e.g., "file:///path" or "/path") + **kwargs: Additional options (base_path for relative operations) + """ + if uri.startswith("file://"): + parsed = urlparse(uri) + path = unquote(parsed.path) + # Handle Windows paths (file:///C:/path) + if path and len(path) > 2 and path[2] == ":": # noqa: PLR2004 + path = path[1:] # Remove leading slash for Windows + self.base_path = Path(path).resolve() + elif uri: + self.base_path = Path(uri).resolve() + else: + self.base_path = Path.cwd() + + # Allow override with explicit base_path + if "base_path" in kwargs: + self.base_path = Path(kwargs["base_path"]).resolve() + + # Create base directory if it doesn't exist and it's actually a directory + if not self.base_path.exists(): + self.base_path.mkdir(parents=True, exist_ok=True) + elif self.base_path.is_file(): + # If base_path points to a file, use its parent as the base directory + self.base_path = self.base_path.parent + self._loop: Optional[asyncio.AbstractEventLoop] = None + + self.protocol = "file" + self.backend_type = "local" + + def _ensure_pyarrow(self) -> None: + """Ensure PyArrow is available for Arrow operations.""" + if not PYARROW_INSTALLED: + raise MissingDependencyError(package="pyarrow", install_package="pyarrow") + + def _resolve_path(self, path: "Union[str, Path]") -> Path: + """Resolve path relative to base_path.""" + p = Path(path) + if p.is_absolute(): + return p + return self.base_path / p + + def read_bytes(self, path: "Union[str, Path]", **kwargs: Any) -> bytes: + """Read bytes from file.""" + resolved = self._resolve_path(path) + return resolved.read_bytes() + + def write_bytes(self, path: "Union[str, Path]", data: bytes, **kwargs: Any) -> None: + """Write bytes to file.""" + resolved = self._resolve_path(path) + resolved.parent.mkdir(parents=True, exist_ok=True) + resolved.write_bytes(data) + + def read_text(self, path: "Union[str, Path]", encoding: str = "utf-8", **kwargs: Any) -> str: + """Read text from file.""" + return self._resolve_path(path).read_text(encoding=encoding) + + def write_text(self, path: "Union[str, Path]", data: str, encoding: str = "utf-8", **kwargs: Any) -> None: + """Write text to file.""" + resolved = self._resolve_path(path) + resolved.parent.mkdir(parents=True, exist_ok=True) + resolved.write_text(data, encoding=encoding) + + def list_objects(self, prefix: str = "", recursive: bool = True, **kwargs: Any) -> list[str]: + """List objects in directory.""" + # If prefix looks like a directory path, treat as directory + if prefix and (prefix.endswith("/") or "/" in prefix): + search_path = self._resolve_path(prefix) + if not search_path.exists(): 
+ return [] + if search_path.is_file(): + return [str(search_path.relative_to(self.base_path))] + else: + # Treat as filename prefix filter + search_path = self.base_path + + pattern = "**/*" if recursive else "*" + files = [] + for path in search_path.glob(pattern): + if path.is_file(): + try: + relative = path.relative_to(self.base_path) + relative_str = str(relative) + # Apply prefix filter if provided + if not prefix or relative_str.startswith(prefix): + files.append(relative_str) + except ValueError: + # Path is outside base_path, use absolute + path_str = str(path) + if not prefix or path_str.startswith(prefix): + files.append(path_str) + + return sorted(files) + + def exists(self, path: "Union[str, Path]", **kwargs: Any) -> bool: + """Check if file exists.""" + return self._resolve_path(path).exists() + + def delete(self, path: "Union[str, Path]", **kwargs: Any) -> None: + """Delete file or directory.""" + resolved = self._resolve_path(path) + if resolved.is_dir(): + shutil.rmtree(resolved) + elif resolved.exists(): + resolved.unlink() + + def copy(self, source: "Union[str, Path]", destination: "Union[str, Path]", **kwargs: Any) -> None: + """Copy file or directory.""" + src = self._resolve_path(source) + dst = self._resolve_path(destination) + dst.parent.mkdir(parents=True, exist_ok=True) + + if src.is_dir(): + shutil.copytree(src, dst, dirs_exist_ok=True) + else: + shutil.copy2(src, dst) + + def move(self, source: "Union[str, Path]", destination: "Union[str, Path]", **kwargs: Any) -> None: + """Move file or directory.""" + src = self._resolve_path(source) + dst = self._resolve_path(destination) + dst.parent.mkdir(parents=True, exist_ok=True) + shutil.move(str(src), str(dst)) + + def glob(self, pattern: str, **kwargs: Any) -> list[str]: + """Find files matching pattern.""" + # Handle both relative and absolute patterns + if Path(pattern).is_absolute(): + base_path = Path(pattern).parent + pattern_name = Path(pattern).name + matches = base_path.rglob(pattern_name) if "**" in pattern else base_path.glob(pattern_name) + else: + matches = self.base_path.rglob(pattern) if "**" in pattern else self.base_path.glob(pattern) + + results = [] + for match in matches: + if match.is_file(): + try: + relative = match.relative_to(self.base_path) + results.append(str(relative)) + except ValueError: + results.append(str(match)) + + return sorted(results) + + def get_metadata(self, path: "Union[str, Path]", **kwargs: Any) -> dict[str, Any]: + """Get file metadata.""" + resolved = self._resolve_path(path) + if not resolved.exists(): + return {} + + stat = resolved.stat() + return { + "size": stat.st_size, + "modified": stat.st_mtime, + "created": stat.st_ctime, + "is_file": resolved.is_file(), + "is_dir": resolved.is_dir(), + "path": str(resolved), + } + + def is_object(self, path: "Union[str, Path]") -> bool: + """Check if path points to a file.""" + return self._resolve_path(path).is_file() + + def is_path(self, path: "Union[str, Path]") -> bool: + """Check if path points to a directory.""" + return self._resolve_path(path).is_dir() + + def read_arrow(self, path: "Union[str, Path]", **kwargs: Any) -> "ArrowTable": + """Read Arrow table from file.""" + self._ensure_pyarrow() + import pyarrow.parquet as pq + + return pq.read_table(str(self._resolve_path(path))) + + def write_arrow(self, path: "Union[str, Path]", table: "ArrowTable", **kwargs: Any) -> None: + """Write Arrow table to file.""" + self._ensure_pyarrow() + import pyarrow.parquet as pq + + resolved = self._resolve_path(path) + 
resolved.parent.mkdir(parents=True, exist_ok=True) + pq.write_table(table, str(resolved)) + + def stream_arrow(self, pattern: str, **kwargs: Any) -> Iterator["ArrowRecordBatch"]: + """Stream Arrow record batches from files matching pattern. + + Yields: + Arrow record batches from matching files. + """ + if not PYARROW_INSTALLED: + raise MissingDependencyError(package="pyarrow", install_package="pyarrow") + import pyarrow.parquet as pq + + files = self.glob(pattern) + for file_path in files: + resolved = self._resolve_path(file_path) + parquet_file = pq.ParquetFile(str(resolved)) + yield from parquet_file.iter_batches() + + def sign(self, path: "Union[str, Path]", expires_in: int = 3600, for_upload: bool = False) -> str: + """Generate a signed URL (returns file:// URI for local files).""" + # For local files, just return a file:// URI + # No actual signing needed for local files + return self._resolve_path(path).as_uri() + + # Async methods using sync_tools.async_ + async def read_bytes_async(self, path: "Union[str, Path]", **kwargs: Any) -> bytes: + """Read bytes from file asynchronously.""" + return await async_(self.read_bytes)(path, **kwargs) + + async def write_bytes_async(self, path: "Union[str, Path]", data: bytes, **kwargs: Any) -> None: + """Write bytes to file asynchronously.""" + await async_(self.write_bytes)(path, data, **kwargs) + + async def read_text_async(self, path: "Union[str, Path]", encoding: str = "utf-8", **kwargs: Any) -> str: + """Read text from file asynchronously.""" + return await async_(self.read_text)(path, encoding, **kwargs) + + async def write_text_async( + self, path: "Union[str, Path]", data: str, encoding: str = "utf-8", **kwargs: Any + ) -> None: + """Write text to file asynchronously.""" + await async_(self.write_text)(path, data, encoding, **kwargs) + + async def list_objects_async(self, prefix: str = "", recursive: bool = True, **kwargs: Any) -> list[str]: + """List objects asynchronously.""" + return await async_(self.list_objects)(prefix, recursive, **kwargs) + + async def exists_async(self, path: "Union[str, Path]", **kwargs: Any) -> bool: + """Check if file exists asynchronously.""" + return await async_(self.exists)(path, **kwargs) + + async def delete_async(self, path: "Union[str, Path]", **kwargs: Any) -> None: + """Delete file asynchronously.""" + await async_(self.delete)(path, **kwargs) + + async def copy_async(self, source: "Union[str, Path]", destination: "Union[str, Path]", **kwargs: Any) -> None: + """Copy file asynchronously.""" + await async_(self.copy)(source, destination, **kwargs) + + async def move_async(self, source: "Union[str, Path]", destination: "Union[str, Path]", **kwargs: Any) -> None: + """Move file asynchronously.""" + await async_(self.move)(source, destination, **kwargs) + + async def get_metadata_async(self, path: "Union[str, Path]", **kwargs: Any) -> dict[str, Any]: + """Get file metadata asynchronously.""" + return await async_(self.get_metadata)(path, **kwargs) + + async def read_arrow_async(self, path: "Union[str, Path]", **kwargs: Any) -> "ArrowTable": + """Read Arrow table asynchronously.""" + return await async_(self.read_arrow)(path, **kwargs) + + async def write_arrow_async(self, path: "Union[str, Path]", table: "ArrowTable", **kwargs: Any) -> None: + """Write Arrow table asynchronously.""" + await async_(self.write_arrow)(path, table, **kwargs) + + def stream_arrow_async(self, pattern: str, **kwargs: Any) -> AsyncIterator["ArrowRecordBatch"]: + """Stream Arrow record batches asynchronously.""" + + # Convert 
sync iterator to async + async def _stream() -> AsyncIterator["ArrowRecordBatch"]: + for batch in self.stream_arrow(pattern, **kwargs): + yield batch + + return _stream() + + async def sign_async(self, path: "Union[str, Path]", expires_in: int = 3600, for_upload: bool = False) -> str: + """Generate a signed URL asynchronously (returns file:// URI for local files).""" + return await async_(self.sign)(path, expires_in, for_upload) diff --git a/sqlspec/storage/backends/obstore.py b/sqlspec/storage/backends/obstore.py index b4dff7d9b..96ac83c91 100644 --- a/sqlspec/storage/backends/obstore.py +++ b/sqlspec/storage/backends/obstore.py @@ -4,24 +4,19 @@ and local file storage. """ -from __future__ import annotations - import fnmatch import logging -from typing import TYPE_CHECKING, Any, ClassVar, Final, cast - -from mypy_extensions import mypyc_attr - -from sqlspec.exceptions import MissingDependencyError, StorageOperationFailedError -from sqlspec.storage.backends.base import ObjectStoreBase -from sqlspec.storage.capabilities import HasStorageCapabilities, StorageCapabilities -from sqlspec.typing import OBSTORE_INSTALLED +from collections.abc import AsyncIterator, Iterator +from typing import TYPE_CHECKING, Any, Final, Optional, Union, cast +from urllib.parse import urlparse if TYPE_CHECKING: - from collections.abc import AsyncIterator, Iterator from pathlib import Path - from sqlspec.typing import ArrowRecordBatch, ArrowTable +from mypy_extensions import mypyc_attr + +from sqlspec.exceptions import MissingDependencyError, StorageOperationFailedError +from sqlspec.typing import OBSTORE_INSTALLED, PYARROW_INSTALLED, ArrowRecordBatch, ArrowTable __all__ = ("ObStoreBackend",) @@ -31,88 +26,122 @@ class _AsyncArrowIterator: """Helper class to work around mypyc's lack of async generator support.""" - def __init__(self, store: Any, pattern: str, **kwargs: Any) -> None: - self.store = store + def __init__(self, backend: "ObStoreBackend", pattern: str, **kwargs: Any) -> None: + self.backend = backend self.pattern = pattern self.kwargs = kwargs - self._iterator: Any | None = None + self._files_iterator: Optional[Iterator[str]] = None + self._current_file_iterator: Optional[Iterator[ArrowRecordBatch]] = None - def __aiter__(self) -> _AsyncArrowIterator: + def __aiter__(self) -> "_AsyncArrowIterator": return self async def __anext__(self) -> ArrowRecordBatch: - if self._iterator is None: - self._iterator = self.store.stream_arrow_async(self.pattern, **self.kwargs) - if self._iterator is not None: - return cast("ArrowRecordBatch", await self._iterator.__anext__()) - raise StopAsyncIteration + if self._files_iterator is None: + files = self.backend.glob(self.pattern, **self.kwargs) + self._files_iterator = iter(files) + + while True: + if self._current_file_iterator is not None: + try: + return next(self._current_file_iterator) + except StopIteration: + self._current_file_iterator = None + + try: + next_file = next(self._files_iterator) + # Stream from this file + file_batches = self.backend.stream_arrow(next_file) + self._current_file_iterator = iter(file_batches) + except StopIteration: + raise StopAsyncIteration DEFAULT_OPTIONS: Final[dict[str, Any]] = {"connect_timeout": "30s", "request_timeout": "60s"} @mypyc_attr(allow_interpreted_subclasses=True) -class ObStoreBackend(ObjectStoreBase, HasStorageCapabilities): +class ObStoreBackend: """Object storage backend using obstore. - Uses obstore's Rust-based implementation for storage operations. 
- Supports AWS S3, Google Cloud Storage, Azure Blob Storage, + Implements ObjectStoreProtocol using obstore's Rust-based implementation + for storage operations. Supports AWS S3, Google Cloud Storage, Azure Blob Storage, local filesystem, and HTTP endpoints. """ - capabilities: ClassVar[StorageCapabilities] = StorageCapabilities( - supports_arrow=True, - supports_streaming=True, - supports_async=True, - supports_batch_operations=True, - supports_multipart_upload=True, - supports_compression=True, - is_cloud_native=True, - has_low_latency=True, - ) + __slots__ = ("_path_cache", "backend_type", "base_path", "protocol", "store", "store_options", "store_uri") + + def _ensure_obstore(self) -> None: + """Ensure obstore is available for operations.""" + if not OBSTORE_INSTALLED: + raise MissingDependencyError(package="obstore", install_package="obstore") - __slots__ = ("_path_cache", "base_path", "protocol", "store", "store_options", "store_uri") + def _ensure_pyarrow(self) -> None: + """Ensure PyArrow is available for Arrow operations.""" + if not PYARROW_INSTALLED: + raise MissingDependencyError(package="pyarrow", install_package="pyarrow") - def __init__(self, store_uri: str, base_path: str = "", **store_options: Any) -> None: + def __init__(self, uri: str, **kwargs: Any) -> None: """Initialize obstore backend. Args: - store_uri: Storage URI (e.g., 's3://bucket', 'file:///path', 'gs://bucket') - base_path: Base path prefix for all operations - **store_options: Additional options for obstore configuration + uri: Storage URI (e.g., 's3://bucket', 'file:///path', 'gs://bucket') + **kwargs: Additional options including base_path and obstore configuration """ - if not OBSTORE_INSTALLED: - raise MissingDependencyError(package="obstore", install_package="obstore") + self._ensure_obstore() try: - self.store_uri = store_uri + # Extract base_path from kwargs + base_path = kwargs.pop("base_path", "") + + self.store_uri = uri self.base_path = base_path.rstrip("/") if base_path else "" - self.store_options = store_options + self.store_options = kwargs self.store: Any self._path_cache: dict[str, str] = {} - self.protocol = store_uri.split("://", 1)[0] if "://" in store_uri else "file" + self.protocol = uri.split("://", 1)[0] if "://" in uri else "file" + self.backend_type = "obstore" - if store_uri.startswith("memory://"): + if uri.startswith("memory://"): from obstore.store import MemoryStore self.store = MemoryStore() - elif store_uri.startswith("file://"): + elif uri.startswith("file://"): + from pathlib import Path as PathlibPath + from obstore.store import LocalStore - self.store = LocalStore("/") + parsed = urlparse(uri) + path = parsed.path or "/" + # Create directory if it doesn't exist (ObStore LocalStore requires it) + PathlibPath(path).mkdir(parents=True, exist_ok=True) + self.store = LocalStore(path) else: from obstore.store import from_url - self.store = from_url(store_uri, **store_options) # pyright: ignore[reportAttributeAccessIssue] + self.store = from_url(uri, **kwargs) # pyright: ignore[reportAttributeAccessIssue] - logger.debug("ObStore backend initialized for %s", store_uri) + logger.debug("ObStore backend initialized for %s", uri) except Exception as exc: - msg = f"Failed to initialize obstore backend for {store_uri}" + msg = f"Failed to initialize obstore backend for {uri}" raise StorageOperationFailedError(msg) from exc - def _resolve_path(self, path: str | Path) -> str: + @classmethod + def from_config(cls, config: dict[str, Any]) -> "ObStoreBackend": + """Create backend from 
configuration dictionary.""" + store_uri = config["store_uri"] + base_path = config.get("base_path", "") + store_options = config.get("store_options", {}) + + kwargs = dict(store_options) + if base_path: + kwargs["base_path"] = base_path + + return cls(uri=store_uri, **kwargs) + + def _resolve_path(self, path: "Union[str, Path]") -> str: """Resolve path relative to base_path.""" path_str = str(path) if path_str.startswith("file://"): @@ -125,49 +154,33 @@ def _resolve_path(self, path: str | Path) -> str: return f"{clean_base}/{clean_path}" return path_str - @property - def backend_type(self) -> str: - """Return backend type identifier.""" - return "obstore" - - def read_bytes(self, path: str | Path, **kwargs: Any) -> bytes: # pyright: ignore[reportUnusedParameter] + def read_bytes(self, path: "Union[str, Path]", **kwargs: Any) -> bytes: # pyright: ignore[reportUnusedParameter] """Read bytes using obstore.""" - try: - result = self.store.get(self._resolve_path(path)) - return cast("bytes", result.bytes().to_bytes()) - except Exception as exc: - msg = f"Failed to read bytes from {path}" - raise StorageOperationFailedError(msg) from exc + result = self.store.get(self._resolve_path(path)) + return cast("bytes", result.bytes().to_bytes()) - def write_bytes(self, path: str | Path, data: bytes, **kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] + def write_bytes(self, path: "Union[str, Path]", data: bytes, **kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] """Write bytes using obstore.""" - try: - self.store.put(self._resolve_path(path), data) - except Exception as exc: - msg = f"Failed to write bytes to {path}" - raise StorageOperationFailedError(msg) from exc + self.store.put(self._resolve_path(path), data) - def read_text(self, path: str | Path, encoding: str = "utf-8", **kwargs: Any) -> str: + def read_text(self, path: "Union[str, Path]", encoding: str = "utf-8", **kwargs: Any) -> str: """Read text using obstore.""" return self.read_bytes(path, **kwargs).decode(encoding) - def write_text(self, path: str | Path, data: str, encoding: str = "utf-8", **kwargs: Any) -> None: + def write_text(self, path: "Union[str, Path]", data: str, encoding: str = "utf-8", **kwargs: Any) -> None: """Write text using obstore.""" self.write_bytes(path, data.encode(encoding), **kwargs) def list_objects(self, prefix: str = "", recursive: bool = True, **kwargs: Any) -> list[str]: # pyright: ignore[reportUnusedParameter] """List objects using obstore.""" - try: - resolved_prefix = self._resolve_path(prefix) if prefix else self.base_path or "" - items = ( - self.store.list_with_delimiter(resolved_prefix) if not recursive else self.store.list(resolved_prefix) - ) - return sorted(str(getattr(item, "path", getattr(item, "key", str(item)))) for item in items) - except Exception as exc: - msg = f"Failed to list objects with prefix '{prefix}'" - raise StorageOperationFailedError(msg) from exc - - def exists(self, path: str | Path, **kwargs: Any) -> bool: # pyright: ignore[reportUnusedParameter] + resolved_prefix = self._resolve_path(prefix) if prefix else self.base_path or "" + items = self.store.list_with_delimiter(resolved_prefix) if not recursive else self.store.list(resolved_prefix) + paths: list[str] = [] + for batch in items: + paths.extend(item["path"] for item in batch) + return sorted(paths) + + def exists(self, path: "Union[str, Path]", **kwargs: Any) -> bool: # pyright: ignore[reportUnusedParameter] """Check if object exists using obstore.""" try: 
self.store.head(self._resolve_path(path)) @@ -175,29 +188,17 @@ def exists(self, path: str | Path, **kwargs: Any) -> bool: # pyright: ignore[re return False return True - def delete(self, path: str | Path, **kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] + def delete(self, path: "Union[str, Path]", **kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] """Delete object using obstore.""" - try: - self.store.delete(self._resolve_path(path)) - except Exception as exc: - msg = f"Failed to delete {path}" - raise StorageOperationFailedError(msg) from exc + self.store.delete(self._resolve_path(path)) - def copy(self, source: str | Path, destination: str | Path, **kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] + def copy(self, source: "Union[str, Path]", destination: "Union[str, Path]", **kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] """Copy object using obstore.""" - try: - self.store.copy(self._resolve_path(source), self._resolve_path(destination)) - except Exception as exc: - msg = f"Failed to copy {source} to {destination}" - raise StorageOperationFailedError(msg) from exc + self.store.copy(self._resolve_path(source), self._resolve_path(destination)) - def move(self, source: str | Path, destination: str | Path, **kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] + def move(self, source: "Union[str, Path]", destination: "Union[str, Path]", **kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] """Move object using obstore.""" - try: - self.store.rename(self._resolve_path(source), self._resolve_path(destination)) - except Exception as exc: - msg = f"Failed to move {source} to {destination}" - raise StorageOperationFailedError(msg) from exc + self.store.rename(self._resolve_path(source), self._resolve_path(destination)) def glob(self, pattern: str, **kwargs: Any) -> list[str]: """Find objects matching pattern. 
@@ -228,7 +229,7 @@ def glob(self, pattern: str, **kwargs: Any) -> list[str]: return matching_objects return [obj for obj in all_objects if fnmatch.fnmatch(obj, resolved_pattern)] - def get_metadata(self, path: str | Path, **kwargs: Any) -> dict[str, Any]: # pyright: ignore[reportUnusedParameter] + def get_metadata(self, path: "Union[str, Path]", **kwargs: Any) -> dict[str, Any]: # pyright: ignore[reportUnusedParameter] """Get object metadata using obstore.""" resolved_path = self._resolve_path(path) result: dict[str, Any] = {} @@ -252,12 +253,12 @@ def get_metadata(self, path: str | Path, **kwargs: Any) -> dict[str, Any]: # py else: return result - def is_object(self, path: str | Path) -> bool: + def is_object(self, path: "Union[str, Path]") -> bool: """Check if path is an object using obstore.""" resolved_path = self._resolve_path(path) return self.exists(path) and not resolved_path.endswith("/") - def is_path(self, path: str | Path) -> bool: + def is_path(self, path: "Union[str, Path]") -> bool: """Check if path is a prefix/directory using obstore.""" resolved_path = self._resolve_path(path) @@ -270,61 +271,53 @@ def is_path(self, path: str | Path) -> bool: except Exception: return False - def read_arrow(self, path: str | Path, **kwargs: Any) -> ArrowTable: + def read_arrow(self, path: "Union[str, Path]", **kwargs: Any) -> ArrowTable: """Read Arrow table using obstore.""" - try: - resolved_path = self._resolve_path(path) - if hasattr(self.store, "read_arrow"): - return self.store.read_arrow(resolved_path, **kwargs) # type: ignore[no-any-return] # pyright: ignore[reportAttributeAccessIssue] + resolved_path = self._resolve_path(path) + if hasattr(self.store, "read_arrow"): + return self.store.read_arrow(resolved_path, **kwargs) # type: ignore[no-any-return] # pyright: ignore[reportAttributeAccessIssue] + + self._ensure_pyarrow() + import io + + import pyarrow.parquet as pq + + return pq.read_table(io.BytesIO(self.read_bytes(resolved_path)), **kwargs) + def write_arrow(self, path: "Union[str, Path]", table: ArrowTable, **kwargs: Any) -> None: + """Write Arrow table using obstore.""" + resolved_path = self._resolve_path(path) + if hasattr(self.store, "write_arrow"): + self.store.write_arrow(resolved_path, table, **kwargs) # pyright: ignore[reportAttributeAccessIssue] + else: + self._ensure_pyarrow() import io + import pyarrow as pa import pyarrow.parquet as pq - data = self.read_bytes(resolved_path) - buffer = io.BytesIO(data) - return pq.read_table(buffer, **kwargs) - except Exception as exc: - msg = f"Failed to read Arrow table from {path}" - raise StorageOperationFailedError(msg) from exc + buffer = io.BytesIO() - def write_arrow(self, path: str | Path, table: ArrowTable, **kwargs: Any) -> None: - """Write Arrow table using obstore.""" - try: - resolved_path = self._resolve_path(path) - if hasattr(self.store, "write_arrow"): - self.store.write_arrow(resolved_path, table, **kwargs) # pyright: ignore[reportAttributeAccessIssue] - else: - import io - - import pyarrow as pa - import pyarrow.parquet as pq - - buffer = io.BytesIO() - - schema = table.schema - if any(str(f.type).startswith("decimal64") for f in schema): - new_fields = [] - for field in schema: - if str(field.type).startswith("decimal64"): - import re - - match = re.match(r"decimal64\((\d+),\s*(\d+)\)", str(field.type)) - if match: - precision, scale = int(match.group(1)), int(match.group(2)) - new_fields.append(pa.field(field.name, pa.decimal128(precision, scale))) - else: - new_fields.append(field) # pragma: no cover + schema 
= table.schema + if any(str(f.type).startswith("decimal64") for f in schema): + new_fields = [] + for field in schema: + if str(field.type).startswith("decimal64"): + import re + + match = re.match(r"decimal64\((\d+),\s*(\d+)\)", str(field.type)) + if match: + precision, scale = int(match.group(1)), int(match.group(2)) + new_fields.append(pa.field(field.name, pa.decimal128(precision, scale))) else: - new_fields.append(field) - table = table.cast(pa.schema(new_fields)) + new_fields.append(field) # pragma: no cover + else: + new_fields.append(field) + table = table.cast(pa.schema(new_fields)) - pq.write_table(table, buffer, **kwargs) - buffer.seek(0) - self.write_bytes(resolved_path, buffer.read()) - except Exception as exc: - msg = f"Failed to write Arrow table to {path}" - raise StorageOperationFailedError(msg) from exc + pq.write_table(table, buffer, **kwargs) + buffer.seek(0) + self.write_bytes(resolved_path, buffer.read()) def stream_arrow(self, pattern: str, **kwargs: Any) -> Iterator[ArrowRecordBatch]: """Stream Arrow record batches. @@ -332,56 +325,65 @@ def stream_arrow(self, pattern: str, **kwargs: Any) -> Iterator[ArrowRecordBatch Yields: Iterator of Arrow record batches from matching objects. """ - try: - resolved_pattern = self._resolve_path(pattern) - yield from self.store.stream_arrow(resolved_pattern, **kwargs) # pyright: ignore[reportAttributeAccessIssue] - except Exception as exc: - msg = f"Failed to stream Arrow data for pattern {pattern}" - raise StorageOperationFailedError(msg) from exc + self._ensure_pyarrow() + from io import BytesIO + + import pyarrow.parquet as pq + + for obj_path in self.glob(pattern, **kwargs): + result = self.store.get(self._resolve_path(obj_path)) + bytes_obj = result.bytes() + data = bytes_obj.to_bytes() + buffer = BytesIO(data) + parquet_file = pq.ParquetFile(buffer) + yield from parquet_file.iter_batches() + + def sign(self, path: str, expires_in: int = 3600, for_upload: bool = False) -> str: + """Generate a signed URL for the object.""" + resolved_path = self._resolve_path(path) + if hasattr(self.store, "sign_url") and callable(self.store.sign_url): + return self.store.sign_url(resolved_path, expires_in=expires_in) # type: ignore[no-any-return] + return f"{self.store_uri}/{resolved_path}" - async def read_bytes_async(self, path: str | Path, **kwargs: Any) -> bytes: # pyright: ignore[reportUnusedParameter] + async def read_bytes_async(self, path: "Union[str, Path]", **kwargs: Any) -> bytes: # pyright: ignore[reportUnusedParameter] """Read bytes from storage asynchronously.""" - try: - resolved_path = self._resolve_path(path) - result = await self.store.get_async(resolved_path) - bytes_obj = await result.bytes_async() - return bytes_obj.to_bytes() # type: ignore[no-any-return] # pyright: ignore[reportAttributeAccessIssue] - except Exception as exc: - msg = f"Failed to read bytes from {path}" - raise StorageOperationFailedError(msg) from exc + resolved_path = self._resolve_path(path) + result = await self.store.get_async(resolved_path) + bytes_obj = await result.bytes_async() + return bytes_obj.to_bytes() # type: ignore[no-any-return] # pyright: ignore[reportAttributeAccessIssue] - async def write_bytes_async(self, path: str | Path, data: bytes, **kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] + async def write_bytes_async(self, path: "Union[str, Path]", data: bytes, **kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] """Write bytes to storage asynchronously.""" resolved_path = self._resolve_path(path) await 
self.store.put_async(resolved_path, data) async def list_objects_async(self, prefix: str = "", recursive: bool = True, **kwargs: Any) -> list[str]: # pyright: ignore[reportUnusedParameter] """List objects in storage asynchronously.""" - try: - resolved_prefix = self._resolve_path(prefix) if prefix else self.base_path or "" + resolved_prefix = self._resolve_path(prefix) if prefix else self.base_path or "" - objects = [str(item.path) async for item in self.store.list_async(resolved_prefix)] # pyright: ignore[reportAttributeAccessIssue] + objects: list[str] = [] + async for batch in self.store.list_async(resolved_prefix): # pyright: ignore[reportAttributeAccessIssue] + objects.extend(item["path"] for item in batch) - if not recursive and resolved_prefix: - base_depth = resolved_prefix.count("/") - objects = [obj for obj in objects if obj.count("/") <= base_depth + 1] + if not recursive and resolved_prefix: + base_depth = resolved_prefix.count("/") + objects = [obj for obj in objects if obj.count("/") <= base_depth + 1] - return sorted(objects) - except Exception as exc: - msg = f"Failed to list objects with prefix '{prefix}'" - raise StorageOperationFailedError(msg) from exc + return sorted(objects) - async def read_text_async(self, path: str | Path, encoding: str = "utf-8", **kwargs: Any) -> str: + async def read_text_async(self, path: "Union[str, Path]", encoding: str = "utf-8", **kwargs: Any) -> str: """Read text from storage asynchronously.""" data = await self.read_bytes_async(path, **kwargs) return data.decode(encoding) - async def write_text_async(self, path: str | Path, data: str, encoding: str = "utf-8", **kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] + async def write_text_async( + self, path: "Union[str, Path]", data: str, encoding: str = "utf-8", **kwargs: Any + ) -> None: # pyright: ignore[reportUnusedParameter] """Write text to storage asynchronously.""" encoded_data = data.encode(encoding) await self.write_bytes_async(path, encoded_data, **kwargs) - async def exists_async(self, path: str | Path, **kwargs: Any) -> bool: # pyright: ignore[reportUnusedParameter] + async def exists_async(self, path: "Union[str, Path]", **kwargs: Any) -> bool: # pyright: ignore[reportUnusedParameter] """Check if object exists in storage asynchronously.""" resolved_path = self._resolve_path(path) try: @@ -390,24 +392,24 @@ async def exists_async(self, path: str | Path, **kwargs: Any) -> bool: # pyrigh return False return True - async def delete_async(self, path: str | Path, **kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] + async def delete_async(self, path: "Union[str, Path]", **kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] """Delete object from storage asynchronously.""" resolved_path = self._resolve_path(path) await self.store.delete_async(resolved_path) - async def copy_async(self, source: str | Path, destination: str | Path, **kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] + async def copy_async(self, source: "Union[str, Path]", destination: "Union[str, Path]", **kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] """Copy object in storage asynchronously.""" source_path = self._resolve_path(source) dest_path = self._resolve_path(destination) await self.store.copy_async(source_path, dest_path) - async def move_async(self, source: str | Path, destination: str | Path, **kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] + async def move_async(self, source: "Union[str, Path]", destination: "Union[str, Path]", 
**kwargs: Any) -> None: # pyright: ignore[reportUnusedParameter] """Move object in storage asynchronously.""" source_path = self._resolve_path(source) dest_path = self._resolve_path(destination) await self.store.rename_async(source_path, dest_path) - async def get_metadata_async(self, path: str | Path, **kwargs: Any) -> dict[str, Any]: # pyright: ignore[reportUnusedParameter] + async def get_metadata_async(self, path: "Union[str, Path]", **kwargs: Any) -> dict[str, Any]: # pyright: ignore[reportUnusedParameter] """Get object metadata from storage asynchronously.""" resolved_path = self._resolve_path(path) result: dict[str, Any] = {} @@ -417,31 +419,40 @@ async def get_metadata_async(self, path: str | Path, **kwargs: Any) -> dict[str, { "path": resolved_path, "exists": True, - "size": metadata.size, - "last_modified": metadata.last_modified, - "e_tag": metadata.e_tag, - "version": metadata.version, + "size": metadata.get("size"), + "last_modified": metadata.get("last_modified"), + "e_tag": metadata.get("e_tag"), + "version": metadata.get("version"), } ) - if hasattr(metadata, "metadata") and metadata.metadata: - result["custom_metadata"] = metadata.metadata + if metadata.get("metadata"): + result["custom_metadata"] = metadata["metadata"] except Exception: return {"path": resolved_path, "exists": False} else: return result - async def read_arrow_async(self, path: str | Path, **kwargs: Any) -> ArrowTable: + async def read_arrow_async(self, path: "Union[str, Path]", **kwargs: Any) -> ArrowTable: """Read Arrow table from storage asynchronously.""" resolved_path = self._resolve_path(path) - return await self.store.read_arrow_async(resolved_path, **kwargs) # type: ignore[no-any-return] # pyright: ignore[reportAttributeAccessIssue] + if hasattr(self.store, "read_arrow_async"): + return await self.store.read_arrow_async(resolved_path, **kwargs) # type: ignore[no-any-return] # pyright: ignore[reportAttributeAccessIssue] + + self._ensure_pyarrow() + import io - async def write_arrow_async(self, path: str | Path, table: ArrowTable, **kwargs: Any) -> None: + import pyarrow.parquet as pq + + return pq.read_table(io.BytesIO(await self.read_bytes_async(resolved_path)), **kwargs) + + async def write_arrow_async(self, path: "Union[str, Path]", table: ArrowTable, **kwargs: Any) -> None: """Write Arrow table to storage asynchronously.""" resolved_path = self._resolve_path(path) if hasattr(self.store, "write_arrow_async"): await self.store.write_arrow_async(resolved_path, table, **kwargs) # pyright: ignore[reportAttributeAccessIssue] else: + self._ensure_pyarrow() import io import pyarrow.parquet as pq @@ -453,4 +464,11 @@ async def write_arrow_async(self, path: str | Path, table: ArrowTable, **kwargs: def stream_arrow_async(self, pattern: str, **kwargs: Any) -> AsyncIterator[ArrowRecordBatch]: resolved_pattern = self._resolve_path(pattern) - return _AsyncArrowIterator(self.store, resolved_pattern, **kwargs) + return _AsyncArrowIterator(self, resolved_pattern, **kwargs) + + async def sign_async(self, path: str, expires_in: int = 3600, for_upload: bool = False) -> str: + """Generate a signed URL asynchronously.""" + resolved_path = self._resolve_path(path) + if hasattr(self.store, "sign_url_async") and callable(self.store.sign_url_async): + return await self.store.sign_url_async(resolved_path, expires_in=expires_in) # type: ignore[no-any-return] + return f"{self.store_uri}/{resolved_path}" diff --git a/sqlspec/storage/capabilities.py b/sqlspec/storage/capabilities.py deleted file mode 100644 index 
6df3c33fe..000000000 --- a/sqlspec/storage/capabilities.py +++ /dev/null @@ -1,102 +0,0 @@ -"""Storage backend capability system. - -This module provides a centralized way to track and query storage backend capabilities. -""" - -from dataclasses import dataclass -from typing import ClassVar - -from mypy_extensions import mypyc_attr - -__all__ = ("HasStorageCapabilities", "StorageCapabilities") - - -@dataclass -class StorageCapabilities: - """Tracks capabilities of a storage backend.""" - - supports_read: bool = True - supports_write: bool = True - supports_delete: bool = True - supports_list: bool = True - supports_exists: bool = True - supports_copy: bool = True - supports_move: bool = True - supports_metadata: bool = True - - supports_arrow: bool = False - supports_streaming: bool = False - supports_async: bool = False - supports_batch_operations: bool = False - supports_multipart_upload: bool = False - supports_compression: bool = False - - supports_s3_select: bool = False - supports_gcs_compose: bool = False - supports_azure_snapshots: bool = False - - is_remote: bool = True - is_cloud_native: bool = False - has_low_latency: bool = False - - @classmethod - def local_filesystem(cls) -> "StorageCapabilities": - """Capabilities for local filesystem backend.""" - return cls( - is_remote=False, has_low_latency=True, supports_arrow=True, supports_streaming=True, supports_async=True - ) - - @classmethod - def s3_compatible(cls) -> "StorageCapabilities": - """Capabilities for S3-compatible backends.""" - return cls( - is_cloud_native=True, - supports_multipart_upload=True, - supports_s3_select=True, - supports_arrow=True, - supports_streaming=True, - supports_async=True, - ) - - @classmethod - def gcs(cls) -> "StorageCapabilities": - """Capabilities for Google Cloud Storage.""" - return cls( - is_cloud_native=True, - supports_multipart_upload=True, - supports_gcs_compose=True, - supports_arrow=True, - supports_streaming=True, - supports_async=True, - ) - - @classmethod - def azure_blob(cls) -> "StorageCapabilities": - """Capabilities for Azure Blob Storage.""" - return cls( - is_cloud_native=True, - supports_multipart_upload=True, - supports_azure_snapshots=True, - supports_arrow=True, - supports_streaming=True, - supports_async=True, - ) - - -@mypyc_attr(allow_interpreted_subclasses=True) -class HasStorageCapabilities: - """Mixin for storage backends that expose their capabilities.""" - - __slots__ = () - - capabilities: ClassVar[StorageCapabilities] - - @classmethod - def has_capability(cls, capability: str) -> bool: - """Check if backend has a specific capability.""" - return getattr(cls.capabilities, capability, False) - - @classmethod - def get_capabilities(cls) -> StorageCapabilities: - """Get all capabilities for this backend.""" - return cls.capabilities diff --git a/sqlspec/storage/registry.py b/sqlspec/storage/registry.py index 6a57da1de..e71e9416b 100644 --- a/sqlspec/storage/registry.py +++ b/sqlspec/storage/registry.py @@ -14,7 +14,6 @@ from sqlspec.exceptions import ImproperConfigurationError, MissingDependencyError from sqlspec.protocols import ObjectStoreProtocol -from sqlspec.storage.capabilities import StorageCapabilities from sqlspec.typing import FSSPEC_INSTALLED, OBSTORE_INSTALLED __all__ = ("StorageRegistry", "storage_registry") @@ -22,34 +21,52 @@ logger = logging.getLogger(__name__) +def _is_local_uri(uri: str) -> bool: + """Check if URI represents a local filesystem path.""" + if "://" in uri and not uri.startswith("file://"): + return False + windows_drive_min_length 
= 3 + return ( + Path(uri).exists() + or Path(uri).is_absolute() + or uri.startswith(("~", ".", "/")) + or (len(uri) >= windows_drive_min_length and uri[1:3] == ":\\") + or "/" in uri + ) + + SCHEME_REGEX: Final = re.compile(r"([a-zA-Z0-9+.-]+)://") -FILE_PROTOCOL: Final[str] = "file" -S3_PROTOCOL: Final[str] = "s3" -GCS_PROTOCOL: Final[str] = "gs" -AZURE_PROTOCOL: Final[str] = "az" + + FSSPEC_ONLY_SCHEMES: Final[frozenset[str]] = frozenset({"http", "https", "ftp", "sftp", "ssh"}) @mypyc_attr(allow_interpreted_subclasses=True) class StorageRegistry: - """Storage registry with URI-first access and automatic backend selection. + """Global storage registry for named backend configurations. - Provides URI-first access pattern with automatic backend selection. - Named aliases support complex configurations. + Allows registering named storage backends that can be accessed from anywhere + in your application. Backends are automatically selected based on URI scheme + unless explicitly overridden. Examples: - backend = registry.get("s3://my-bucket/file.parquet") - backend = registry.get("file:///tmp/data.csv") - backend = registry.get("gs://bucket/data.json") - - registry.register_alias( - "production-s3", - uri="s3://prod-bucket/data", - base_path="sqlspec", - aws_access_key_id="...", - aws_secret_access_key="..." - ) - backend = registry.get("production-s3") + # Direct URI access to storage containers + backend = registry.get("s3://my-bucket") + backend = registry.get("file:///tmp/data") + backend = registry.get("gs://my-gcs-bucket") + + # Named store pattern for environment-specific backends + # Development + registry.register_alias("my_app_store", "file:///tmp/dev_data") + + # Production + registry.register_alias("my_app_store", "s3://prod-bucket/data") + + # Access from anywhere in your app + store = registry.get("my_app_store") # Works in both environments + + # Force specific backend when multiple options available + backend = registry.get("s3://bucket", backend="fsspec") # Force fsspec over obstore """ __slots__ = ("_alias_configs", "_aliases", "_cache", "_instances") @@ -60,44 +77,47 @@ def __init__(self) -> None: self._instances: dict[Union[str, tuple[str, tuple[tuple[str, Any], ...]]], ObjectStoreProtocol] = {} self._cache: dict[str, tuple[str, type[ObjectStoreProtocol]]] = {} + def _make_hashable(self, obj: Any) -> Any: + """Convert nested dict/list structures to hashable tuples.""" + if isinstance(obj, dict): + return tuple(sorted((k, self._make_hashable(v)) for k, v in obj.items())) + if isinstance(obj, list): + return tuple(self._make_hashable(item) for item in obj) + if isinstance(obj, set): + return tuple(sorted(self._make_hashable(item) for item in obj)) + return obj + def register_alias( - self, - alias: str, - uri: str, - *, - backend: Optional[type[ObjectStoreProtocol]] = None, - base_path: str = "", - config: Optional[dict[str, Any]] = None, - **kwargs: Any, + self, alias: str, uri: str, *, backend: Optional[str] = None, base_path: str = "", **kwargs: Any ) -> None: """Register a named alias for a storage configuration. 
Args: - alias: Unique alias name for the configuration - uri: Storage URI (e.g., "s3://bucket", "file:///path") - backend: Backend class to use (auto-detected from URI if not provided) + alias: Unique alias name (e.g., "my_app_store", "user_uploads") + uri: Storage URI (e.g., "s3://bucket", "file:///path", "gs://bucket") + backend: Force specific backend ("local", "fsspec", "obstore") instead of auto-detection base_path: Base path to prepend to all operations - config: Additional configuration dict **kwargs: Backend-specific configuration options """ - if backend is None: - backend = self._determine_backend_class(uri) + backend_cls = self._get_backend_class(backend) if backend else self._determine_backend_class(uri) - config = config or {} - config.update(kwargs) - backend_config = dict(config) + backend_config = dict(kwargs) if base_path: backend_config["base_path"] = base_path - self._alias_configs[alias] = (backend, uri, backend_config) + self._alias_configs[alias] = (backend_cls, uri, backend_config) + test_config = dict(backend_config) test_config["uri"] = uri self._aliases[alias] = test_config - def get(self, uri_or_alias: Union[str, Path], **kwargs: Any) -> ObjectStoreProtocol: + def get( + self, uri_or_alias: Union[str, Path], *, backend: Optional[str] = None, **kwargs: Any + ) -> ObjectStoreProtocol: """Get backend instance using URI-first routing with automatic backend selection. Args: - uri_or_alias: URI to resolve directly OR named alias + uri_or_alias: URI to resolve directly OR named alias (e.g., "my_app_store") + backend: Force specific backend ("local", "fsspec", "obstore") instead of auto-selection **kwargs: Additional backend-specific configuration options Returns: @@ -113,24 +133,20 @@ def get(self, uri_or_alias: Union[str, Path], **kwargs: Any) -> ObjectStoreProto if isinstance(uri_or_alias, Path): uri_or_alias = f"file://{uri_or_alias.resolve()}" - cache_key = (uri_or_alias, tuple(sorted(kwargs.items()))) if kwargs else uri_or_alias + cache_key = (uri_or_alias, self._make_hashable(kwargs)) if kwargs else uri_or_alias if cache_key in self._instances: return self._instances[cache_key] scheme = self._get_scheme(uri_or_alias) - if not scheme and ( - Path(uri_or_alias).exists() - or Path(uri_or_alias).is_absolute() - or uri_or_alias.startswith(("~", ".")) - or ":\\" in uri_or_alias - or "/" in uri_or_alias - ): + if not scheme and _is_local_uri(uri_or_alias): scheme = "file" uri_or_alias = f"file://{uri_or_alias}" if scheme: - instance = self._resolve_from_uri(uri_or_alias, **kwargs) + instance = self._resolve_from_uri(uri_or_alias, backend_override=backend, **kwargs) elif uri_or_alias in self._alias_configs: backend_cls, stored_uri, config = self._alias_configs[uri_or_alias] + if backend: + backend_cls = self._get_backend_class(backend) instance = backend_cls(stored_uri, **{**config, **kwargs}) else: msg = f"Unknown storage alias or invalid URI: '{uri_or_alias}'" @@ -138,36 +154,66 @@ def get(self, uri_or_alias: Union[str, Path], **kwargs: Any) -> ObjectStoreProto self._instances[cache_key] = instance return instance - def _resolve_from_uri(self, uri: str, **kwargs: Any) -> ObjectStoreProtocol: - """Resolve backend from URI, trying ObStore first, then FSSpec.""" + def _resolve_from_uri( + self, uri: str, *, backend_override: Optional[str] = None, **kwargs: Any + ) -> ObjectStoreProtocol: + """Resolve backend from URI with optional backend override.""" + if backend_override: + return self._create_backend(backend_override, uri, **kwargs) scheme = self._get_scheme(uri) + + # 
For local files, prefer LocalStore first + if scheme in {None, "file"}: + return self._create_backend("local", uri, **kwargs) + + # Try ObStore first if available and appropriate if scheme not in FSSPEC_ONLY_SCHEMES and OBSTORE_INSTALLED: try: return self._create_backend("obstore", uri, **kwargs) except (ValueError, ImportError, NotImplementedError): pass + + # Try FSSpec if available if FSSPEC_INSTALLED: try: return self._create_backend("fsspec", uri, **kwargs) except (ValueError, ImportError, NotImplementedError): pass - msg = "obstore" - raise MissingDependencyError(msg, "fsspec") + + # For cloud schemes without backends, provide helpful error + msg = f"No backend available for URI scheme '{scheme}'. Install obstore or fsspec for cloud storage support." + raise MissingDependencyError(msg) def _determine_backend_class(self, uri: str) -> type[ObjectStoreProtocol]: """Determine the backend class for a URI based on availability.""" scheme = self._get_scheme(uri) + + # For local files, always use LocalStore + if scheme in {None, "file"}: + return self._get_backend_class("local") + + # FSSpec-only schemes require FSSpec if scheme in FSSPEC_ONLY_SCHEMES and FSSPEC_INSTALLED: return self._get_backend_class("fsspec") + + # Prefer ObStore for cloud storage if available if OBSTORE_INSTALLED: return self._get_backend_class("obstore") + + # Fall back to FSSpec if available if FSSPEC_INSTALLED: return self._get_backend_class("fsspec") - msg = f"No backend available for URI scheme '{scheme}'. Install obstore or fsspec." + + # For cloud schemes without backends, provide helpful error + msg = f"No backend available for URI scheme '{scheme}'. Install obstore or fsspec for cloud storage support." raise MissingDependencyError(msg) def _get_backend_class(self, backend_type: str) -> type[ObjectStoreProtocol]: """Get backend class by type name.""" + if backend_type == "local": + from sqlspec.storage.backends.local import LocalStore + + return cast("type[ObjectStoreProtocol]", LocalStore) if backend_type == "obstore": from sqlspec.storage.backends.obstore import ObStoreBackend @@ -176,7 +222,7 @@ def _get_backend_class(self, backend_type: str) -> type[ObjectStoreProtocol]: from sqlspec.storage.backends.fsspec import FSSpecBackend return cast("type[ObjectStoreProtocol]", FSSpecBackend) - msg = f"Unknown backend type: {backend_type}. Supported types: 'obstore', 'fsspec'" + msg = f"Unknown backend type: {backend_type}. 
Supported types: 'local', 'obstore', 'fsspec'" raise ValueError(msg) def _create_backend(self, backend_type: str, uri: str, **kwargs: Any) -> ObjectStoreProtocol: @@ -220,20 +266,5 @@ def clear_aliases(self) -> None: self._alias_configs.clear() self._aliases.clear() - def get_backend_capabilities(self, uri_or_alias: Union[str, Path]) -> "StorageCapabilities": - """Get capabilities for a backend without creating an instance.""" - if isinstance(uri_or_alias, Path): - uri_or_alias = f"file://{uri_or_alias.resolve()}" - if "://" in uri_or_alias: - backend_cls = self._determine_backend_class(uri_or_alias) - elif uri_or_alias in self._alias_configs: - backend_cls, _, _ = self._alias_configs[uri_or_alias] - else: - msg = f"Unknown storage alias or invalid URI: '{uri_or_alias}'" - raise ImproperConfigurationError(msg) - if hasattr(backend_cls, "capabilities"): - return backend_cls.capabilities - return StorageCapabilities() - storage_registry = StorageRegistry() diff --git a/tests/integration/test_loader/test_file_system_loading.py b/tests/integration/test_loader/test_file_system_loading.py index b771cfb57..5f2d9be06 100644 --- a/tests/integration/test_loader/test_file_system_loading.py +++ b/tests/integration/test_loader/test_file_system_loading.py @@ -22,20 +22,22 @@ from sqlspec.loader import SQLFileLoader -class TestFileSystemBasicOperations: - """Test basic file system operations.""" - - @pytest.fixture - def temp_workspace(self) -> Generator[Path, None, None]: - """Create a temporary workspace for file system tests.""" - with tempfile.TemporaryDirectory() as temp_dir: - workspace = Path(temp_dir) - yield workspace - - def test_load_single_file_from_filesystem(self, temp_workspace: Path) -> None: - """Test loading a single SQL file from the file system.""" - sql_file = temp_workspace / "test_queries.sql" - sql_file.write_text(""" +@pytest.fixture +def temp_workspace() -> Generator[Path, None, None]: + """Create a temporary workspace for file system tests.""" + with tempfile.TemporaryDirectory() as temp_dir: + workspace = Path(temp_dir) + yield workspace + + +def test_load_single_file_from_filesystem(temp_workspace: Path) -> None: + """Test loading a single SQL file from the file system. + + Args: + temp_workspace: Temporary directory for test files. + """ + sql_file = temp_workspace / "test_queries.sql" + sql_file.write_text(""" -- name: get_user_count SELECT COUNT(*) as total_users FROM users; @@ -43,22 +45,26 @@ def test_load_single_file_from_filesystem(self, temp_workspace: Path) -> None: SELECT id, name FROM users WHERE active = true; """) - loader = SQLFileLoader() - loader.load_sql(sql_file) + loader = SQLFileLoader() + loader.load_sql(sql_file) - queries = loader.list_queries() - assert "get_user_count" in queries - assert "get_active_users" in queries + queries = loader.list_queries() + assert "get_user_count" in queries + assert "get_active_users" in queries + + user_count_sql = loader.get_sql("get_user_count") + assert isinstance(user_count_sql, SQL) + assert "COUNT(*)" in user_count_sql.sql - user_count_sql = loader.get_sql("get_user_count") - assert isinstance(user_count_sql, SQL) - assert "COUNT(*)" in user_count_sql.sql - def test_load_multiple_files_from_filesystem(self, temp_workspace: Path) -> None: - """Test loading multiple SQL files from the file system.""" +def test_load_multiple_files_from_filesystem(temp_workspace: Path) -> None: + """Test loading multiple SQL files from the file system. 
- users_file = temp_workspace / "users.sql" - users_file.write_text(""" + Args: + temp_workspace: Temporary directory for test files. + """ + users_file = temp_workspace / "users.sql" + users_file.write_text(""" -- name: create_user INSERT INTO users (name, email) VALUES (:name, :email); @@ -66,8 +72,8 @@ def test_load_multiple_files_from_filesystem(self, temp_workspace: Path) -> None UPDATE users SET email = :email WHERE id = :user_id; """) - products_file = temp_workspace / "products.sql" - products_file.write_text(""" + products_file = temp_workspace / "products.sql" + products_file.write_text(""" -- name: list_products SELECT id, name, price FROM products ORDER BY name; @@ -75,42 +81,46 @@ def test_load_multiple_files_from_filesystem(self, temp_workspace: Path) -> None SELECT * FROM products WHERE id = :product_id; """) - loader = SQLFileLoader() - loader.load_sql(users_file, products_file) + loader = SQLFileLoader() + loader.load_sql(users_file, products_file) - queries = loader.list_queries() - assert "create_user" in queries - assert "update_user_email" in queries - assert "list_products" in queries - assert "get_product_by_id" in queries + queries = loader.list_queries() + assert "create_user" in queries + assert "update_user_email" in queries + assert "list_products" in queries + assert "get_product_by_id" in queries + + files = loader.list_files() + assert str(users_file) in files + assert str(products_file) in files - files = loader.list_files() - assert str(users_file) in files - assert str(products_file) in files - def test_load_directory_structure_from_filesystem(self, temp_workspace: Path) -> None: - """Test loading entire directory structures from file system.""" +def test_load_directory_structure_from_filesystem(temp_workspace: Path) -> None: + """Test loading entire directory structures from file system. - queries_dir = temp_workspace / "queries" - queries_dir.mkdir() + Args: + temp_workspace: Temporary directory for test files. 
+ """ + queries_dir = temp_workspace / "queries" + queries_dir.mkdir() - analytics_dir = queries_dir / "analytics" - analytics_dir.mkdir() + analytics_dir = queries_dir / "analytics" + analytics_dir.mkdir() - admin_dir = queries_dir / "admin" - admin_dir.mkdir() + admin_dir = queries_dir / "admin" + admin_dir.mkdir() - (temp_workspace / "root.sql").write_text(""" + (temp_workspace / "root.sql").write_text(""" -- name: health_check SELECT 'OK' as status; """) - (queries_dir / "common.sql").write_text(""" + (queries_dir / "common.sql").write_text(""" -- name: get_system_info SELECT version() as db_version; """) - (analytics_dir / "reports.sql").write_text(""" + (analytics_dir / "reports.sql").write_text(""" -- name: user_analytics SELECT COUNT(*) as users, AVG(age) as avg_age FROM users; @@ -118,205 +128,246 @@ def test_load_directory_structure_from_filesystem(self, temp_workspace: Path) -> SELECT SUM(amount) as total_sales FROM orders; """) - (admin_dir / "management.sql").write_text(""" + (admin_dir / "management.sql").write_text(""" -- name: cleanup_old_logs DELETE FROM logs WHERE created_at < :cutoff_date; """) - loader = SQLFileLoader() - loader.load_sql(temp_workspace) + loader = SQLFileLoader() + loader.load_sql(temp_workspace) - queries = loader.list_queries() + queries = loader.list_queries() + + assert "health_check" in queries - assert "health_check" in queries + assert "queries.get_system_info" in queries + assert "queries.analytics.user_analytics" in queries + assert "queries.analytics.sales_analytics" in queries + assert "queries.admin.cleanup_old_logs" in queries - assert "queries.get_system_info" in queries - assert "queries.analytics.user_analytics" in queries - assert "queries.analytics.sales_analytics" in queries - assert "queries.admin.cleanup_old_logs" in queries - def test_file_content_encoding_handling(self, temp_workspace: Path) -> None: - """Test handling of different file encodings.""" +def test_file_content_encoding_handling(temp_workspace: Path) -> None: + """Test handling of different file encodings. - utf8_file = temp_workspace / "utf8_queries.sql" - utf8_content = """ + Args: + temp_workspace: Temporary directory for test files. + """ + utf8_file = temp_workspace / "utf8_queries.sql" + utf8_content = """ -- name: unicode_query -- Test with Unicode: 测试 файл עברית SELECT 'Unicode test: 测试' as message; """ - utf8_file.write_text(utf8_content, encoding="utf-8") + utf8_file.write_text(utf8_content, encoding="utf-8") - loader = SQLFileLoader(encoding="utf-8") - loader.load_sql(utf8_file) + loader = SQLFileLoader(encoding="utf-8") + loader.load_sql(utf8_file) - queries = loader.list_queries() - assert "unicode_query" in queries + queries = loader.list_queries() + assert "unicode_query" in queries + + sql = loader.get_sql("unicode_query") + assert isinstance(sql, SQL) - sql = loader.get_sql("unicode_query") - assert isinstance(sql, SQL) - def test_file_modification_detection(self, temp_workspace: Path) -> None: - """Test detection of file modifications.""" - sql_file = temp_workspace / "modifiable.sql" - original_content = """ +def test_file_modification_detection(temp_workspace: Path) -> None: + """Test detection of file modifications. + + Args: + temp_workspace: Temporary directory for test files. 
+ """ + sql_file = temp_workspace / "modifiable.sql" + original_content = """ -- name: original_query SELECT 'original' as version; """ - sql_file.write_text(original_content) + sql_file.write_text(original_content) - loader = SQLFileLoader() - loader.load_sql(sql_file) + loader = SQLFileLoader() + loader.load_sql(sql_file) - sql = loader.get_sql("original_query") - assert "original" in sql.sql + sql = loader.get_sql("original_query") + assert "original" in sql.sql - modified_content = """ + modified_content = """ -- name: modified_query SELECT 'modified' as version; -- name: additional_query SELECT 'new' as status; """ - time.sleep(0.1) - sql_file.write_text(modified_content) + time.sleep(0.1) + sql_file.write_text(modified_content) - loader.clear_cache() - loader.load_sql(sql_file) + loader.clear_cache() + loader.load_sql(sql_file) + + queries = loader.list_queries() + assert "modified_query" in queries + assert "additional_query" in queries + assert "original_query" not in queries - queries = loader.list_queries() - assert "modified_query" in queries - assert "additional_query" in queries - assert "original_query" not in queries - def test_symlink_resolution(self, temp_workspace: Path) -> None: - """Test resolution of symbolic links.""" +def test_symlink_resolution(temp_workspace: Path) -> None: + """Test resolution of symbolic links. - original_file = temp_workspace / "original.sql" - original_file.write_text(""" + Args: + temp_workspace: Temporary directory for test files. + """ + original_file = temp_workspace / "original.sql" + original_file.write_text(""" -- name: symlinked_query SELECT 'from symlink' as source; """) - symlink_file = temp_workspace / "linked.sql" - try: - symlink_file.symlink_to(original_file) - except OSError: - pytest.skip("Symbolic links not supported on this system") + symlink_file = temp_workspace / "linked.sql" + try: + symlink_file.symlink_to(original_file) + except OSError: + pytest.skip("Symbolic links not supported on this system") - loader = SQLFileLoader() - loader.load_sql(symlink_file) + loader = SQLFileLoader() + loader.load_sql(symlink_file) - queries = loader.list_queries() - assert "symlinked_query" in queries + queries = loader.list_queries() + assert "symlinked_query" in queries -class TestFileSystemErrorHandling: - """Test file system error handling scenarios.""" +def test_nonexistent_file_error(temp_workspace: Path) -> None: + """Test error handling for nonexistent files. - @pytest.fixture - def temp_workspace(self) -> Generator[Path, None, None]: - """Create a temporary workspace for error testing.""" - with tempfile.TemporaryDirectory() as temp_dir: - workspace = Path(temp_dir) - yield workspace + Args: + temp_workspace: Temporary directory for test files. - def test_nonexistent_file_error(self, temp_workspace: Path) -> None: - """Test error handling for nonexistent files.""" - loader = SQLFileLoader() - nonexistent_file = temp_workspace / "does_not_exist.sql" + Raises: + SQLFileNotFoundError: When attempting to load nonexistent file. 
+ """ + loader = SQLFileLoader() + nonexistent_file = temp_workspace / "does_not_exist.sql" - with pytest.raises(SQLFileNotFoundError): - loader.load_sql(nonexistent_file) + with pytest.raises(SQLFileNotFoundError): + loader.load_sql(nonexistent_file) - def test_nonexistent_directory_handling(self, temp_workspace: Path) -> None: - """Test handling of nonexistent directories.""" - loader = SQLFileLoader() - nonexistent_dir = temp_workspace / "does_not_exist" - loader.load_sql(nonexistent_dir) +def test_nonexistent_directory_handling(temp_workspace: Path) -> None: + """Test handling of nonexistent directories. + + Args: + temp_workspace: Temporary directory for test files. + """ + loader = SQLFileLoader() + nonexistent_dir = temp_workspace / "does_not_exist" + + loader.load_sql(nonexistent_dir) + + assert loader.list_queries() == [] + assert loader.list_files() == [] - assert loader.list_queries() == [] - assert loader.list_files() == [] - def test_permission_denied_error(self, temp_workspace: Path) -> None: - """Test handling of permission denied errors.""" - if os.name == "nt": - pytest.skip("Permission testing not reliable on Windows") +def test_permission_denied_error(temp_workspace: Path) -> None: + """Test handling of permission denied errors. - restricted_file = temp_workspace / "restricted.sql" - restricted_file.write_text(""" + Args: + temp_workspace: Temporary directory for test files. + + Raises: + SQLFileParseError: When file permissions prevent reading. + """ + if os.name == "nt": + pytest.skip("Permission testing not reliable on Windows") + + restricted_file = temp_workspace / "restricted.sql" + restricted_file.write_text(""" -- name: restricted_query SELECT 'restricted' as access; """) - restricted_file.chmod(0o000) + restricted_file.chmod(0o000) - try: - loader = SQLFileLoader() + try: + loader = SQLFileLoader() - with pytest.raises(SQLFileParseError): - loader.load_sql(restricted_file) - finally: - restricted_file.chmod(0o644) + with pytest.raises(SQLFileParseError): + loader.load_sql(restricted_file) + finally: + restricted_file.chmod(0o644) + + +def test_corrupted_file_handling(temp_workspace: Path) -> None: + """Test handling of corrupted or invalid SQL files. - def test_corrupted_file_handling(self, temp_workspace: Path) -> None: - """Test handling of corrupted or invalid SQL files.""" - corrupted_file = temp_workspace / "corrupted.sql" + Args: + temp_workspace: Temporary directory for test files. - corrupted_file.write_text(""" + Raises: + SQLFileParseError: When file contains invalid SQL format. + """ + corrupted_file = temp_workspace / "corrupted.sql" + + corrupted_file.write_text(""" This is not a valid SQL file with named queries. It has no proper -- name: declarations. Just random text that should cause parsing to fail. """) - loader = SQLFileLoader() + loader = SQLFileLoader() - with pytest.raises(SQLFileParseError) as exc_info: - loader.load_sql(corrupted_file) + with pytest.raises(SQLFileParseError) as exc_info: + loader.load_sql(corrupted_file) - assert "No named SQL statements found" in str(exc_info.value) + assert "No named SQL statements found" in str(exc_info.value) - def test_empty_file_handling(self, temp_workspace: Path) -> None: - """Test handling of empty files.""" - empty_file = temp_workspace / "empty.sql" - empty_file.write_text("") - loader = SQLFileLoader() +def test_empty_file_handling(temp_workspace: Path) -> None: + """Test handling of empty files. 
- with pytest.raises(SQLFileParseError) as exc_info: - loader.load_sql(empty_file) + Args: + temp_workspace: Temporary directory for test files. - assert "No named SQL statements found" in str(exc_info.value) + Raises: + SQLFileParseError: When file is empty or contains no SQL statements. + """ + empty_file = temp_workspace / "empty.sql" + empty_file.write_text("") - def test_binary_file_handling(self, temp_workspace: Path) -> None: - """Test handling of binary files with .sql extension.""" - binary_file = temp_workspace / "binary.sql" + loader = SQLFileLoader() - with open(binary_file, "wb") as f: - f.write(b"\x00\x01\x02\x03\x04\x05") + with pytest.raises(SQLFileParseError) as exc_info: + loader.load_sql(empty_file) - loader = SQLFileLoader() + assert "No named SQL statements found" in str(exc_info.value) - with pytest.raises(SQLFileParseError): - loader.load_sql(binary_file) +def test_binary_file_handling(temp_workspace: Path) -> None: + """Test handling of binary files with .sql extension. + + Args: + temp_workspace: Temporary directory for test files. + + Raises: + SQLFileParseError: When file contains binary data instead of text. + """ + binary_file = temp_workspace / "binary.sql" + + with open(binary_file, "wb") as f: + f.write(b"\x00\x01\x02\x03\x04\x05") + + loader = SQLFileLoader() + + with pytest.raises(SQLFileParseError): + loader.load_sql(binary_file) -class TestFileSystemPerformance: - """Test file system performance scenarios.""" - @pytest.fixture - def temp_workspace(self) -> Generator[Path, None, None]: - """Create a temporary workspace for performance testing.""" - with tempfile.TemporaryDirectory() as temp_dir: - workspace = Path(temp_dir) - yield workspace +def test_large_file_loading_performance(temp_workspace: Path) -> None: + """Test performance with large SQL files. - def test_large_file_loading_performance(self, temp_workspace: Path) -> None: - """Test performance with large SQL files.""" - large_file = temp_workspace / "large_queries.sql" + Args: + temp_workspace: Temporary directory for test files. + """ + large_file = temp_workspace / "large_queries.sql" - large_content = "\n".join( - f""" + large_content = "\n".join( + f""" -- name: large_query_{i:04d} SELECT {i} as query_id, 'This is query number {i}' as description, @@ -328,366 +379,395 @@ def test_large_file_loading_performance(self, temp_workspace: Path) -> None: ORDER BY id LIMIT 1000; """ - for i in range(500) - ) - large_file.write_text(large_content) + for i in range(500) + ) + large_file.write_text(large_content) - loader = SQLFileLoader() + loader = SQLFileLoader() - start_time = time.time() - loader.load_sql(large_file) - end_time = time.time() + start_time = time.time() + loader.load_sql(large_file) + end_time = time.time() - load_time = end_time - start_time + load_time = end_time - start_time - queries = loader.list_queries() - assert len(queries) == 500 + queries = loader.list_queries() + assert len(queries) == 500 + + assert load_time < 5.0, f"Loading took too long: {load_time:.2f}s" - assert load_time < 5.0, f"Loading took too long: {load_time:.2f}s" - def test_many_small_files_performance(self, temp_workspace: Path) -> None: - """Test performance with many small SQL files.""" - files_dir = temp_workspace / "many_files" - files_dir.mkdir() +def test_many_small_files_performance(temp_workspace: Path) -> None: + """Test performance with many small SQL files. 
- for i in range(100): - small_file = files_dir / f"query_{i:03d}.sql" - small_file.write_text(f""" + Args: + temp_workspace: Temporary directory for test files. + """ + files_dir = temp_workspace / "many_files" + files_dir.mkdir() + + for i in range(100): + small_file = files_dir / f"query_{i:03d}.sql" + small_file.write_text(f""" -- name: small_query_{i:03d} SELECT {i} as file_number, 'small file {i}' as description; """) - loader = SQLFileLoader() + loader = SQLFileLoader() - start_time = time.time() - loader.load_sql(files_dir) - end_time = time.time() + start_time = time.time() + loader.load_sql(files_dir) + end_time = time.time() - load_time = end_time - start_time + load_time = end_time - start_time - queries = loader.list_queries() - assert len(queries) == 100 + queries = loader.list_queries() + assert len(queries) == 100 + + assert load_time < 10.0, f"Loading took too long: {load_time:.2f}s" - assert load_time < 10.0, f"Loading took too long: {load_time:.2f}s" - def test_deep_directory_structure_performance(self, temp_workspace: Path) -> None: - """Test performance with deep directory structures.""" +def test_deep_directory_structure_performance(temp_workspace: Path) -> None: + """Test performance with deep directory structures. - current_path = temp_workspace - for level in range(10): - current_path = current_path / f"level_{level}" - current_path.mkdir() + Args: + temp_workspace: Temporary directory for test files. + """ + current_path = temp_workspace + for level in range(10): + current_path = current_path / f"level_{level}" + current_path.mkdir() - sql_file = current_path / f"queries_level_{level}.sql" - sql_file.write_text(f""" + sql_file = current_path / f"queries_level_{level}.sql" + sql_file.write_text(f""" -- name: deep_query_level_{level} SELECT {level} as depth_level, 'level {level}' as description; """) - loader = SQLFileLoader() + loader = SQLFileLoader() - start_time = time.time() - loader.load_sql(temp_workspace) - end_time = time.time() - - load_time = end_time - start_time + start_time = time.time() + loader.load_sql(temp_workspace) + end_time = time.time() - queries = loader.list_queries() - assert len(queries) == 10 + load_time = end_time - start_time - deepest_namespace = ".".join([f"level_{i}" for i in range(10)]) - deepest_query = f"{deepest_namespace}.deep_query_level_9" - assert deepest_query in queries + queries = loader.list_queries() + assert len(queries) == 10 - assert load_time < 5.0, f"Loading took too long: {load_time:.2f}s" + deepest_namespace = ".".join([f"level_{i}" for i in range(10)]) + deepest_query = f"{deepest_namespace}.deep_query_level_9" + assert deepest_query in queries + assert load_time < 5.0, f"Loading took too long: {load_time:.2f}s" -class TestFileSystemConcurrency: - """Test concurrent file system access patterns.""" - @pytest.fixture - def temp_workspace(self) -> Generator[Path, None, None]: - """Create a temporary workspace for concurrency testing.""" - with tempfile.TemporaryDirectory() as temp_dir: - workspace = Path(temp_dir) - yield workspace +def test_concurrent_file_modification(temp_workspace: Path) -> None: + """Test handling of concurrent file modifications. - def test_concurrent_file_modification(self, temp_workspace: Path) -> None: - """Test handling of concurrent file modifications.""" - shared_file = temp_workspace / "shared.sql" + Args: + temp_workspace: Temporary directory for test files. 
+ """ + shared_file = temp_workspace / "shared.sql" - shared_file.write_text(""" + shared_file.write_text(""" -- name: shared_query_v1 SELECT 'version 1' as version; """) - loader1 = SQLFileLoader() - loader2 = SQLFileLoader() + loader1 = SQLFileLoader() + loader2 = SQLFileLoader() - loader1.load_sql(shared_file) - loader2.load_sql(shared_file) + loader1.load_sql(shared_file) + loader2.load_sql(shared_file) - assert "shared_query_v1" in loader1.list_queries() - assert "shared_query_v1" in loader2.list_queries() + assert "shared_query_v1" in loader1.list_queries() + assert "shared_query_v1" in loader2.list_queries() - shared_file.write_text(""" + shared_file.write_text(""" -- name: shared_query_v2 SELECT 'version 2' as version; """) - loader1.clear_cache() - loader1.load_sql(shared_file) + loader1.clear_cache() + loader1.load_sql(shared_file) + + assert "shared_query_v2" in loader1.list_queries() + assert "shared_query_v1" not in loader1.list_queries() + + assert "shared_query_v1" in loader2.list_queries() + assert "shared_query_v2" not in loader2.list_queries() - assert "shared_query_v2" in loader1.list_queries() - assert "shared_query_v1" not in loader1.list_queries() - assert "shared_query_v1" in loader2.list_queries() - assert "shared_query_v2" not in loader2.list_queries() +def test_multiple_loaders_same_file(temp_workspace: Path) -> None: + """Test multiple loaders accessing the same file. - def test_multiple_loaders_same_file(self, temp_workspace: Path) -> None: - """Test multiple loaders accessing the same file.""" - sql_file = temp_workspace / "multi_access.sql" - sql_file.write_text(""" + Args: + temp_workspace: Temporary directory for test files. + """ + sql_file = temp_workspace / "multi_access.sql" + sql_file.write_text(""" -- name: multi_access_query SELECT 'accessed by multiple loaders' as message; """) - loaders = [SQLFileLoader() for _ in range(5)] + loaders = [SQLFileLoader() for _ in range(5)] - for loader in loaders: - loader.load_sql(sql_file) + for loader in loaders: + loader.load_sql(sql_file) + + for i, loader in enumerate(loaders): + queries = loader.list_queries() + assert "multi_access_query" in queries, f"Loader {i} missing query" - for i, loader in enumerate(loaders): - queries = loader.list_queries() - assert "multi_access_query" in queries, f"Loader {i} missing query" + sql = loader.get_sql("multi_access_query") + assert isinstance(sql, SQL) - sql = loader.get_sql("multi_access_query") - assert isinstance(sql, SQL) - def test_loader_isolation(self, temp_workspace: Path) -> None: - """Test that loaders are properly isolated from each other.""" - file1 = temp_workspace / "loader1.sql" - file2 = temp_workspace / "loader2.sql" +def test_loader_isolation(temp_workspace: Path) -> None: + """Test that loaders are properly isolated from each other. - file1.write_text(""" + Args: + temp_workspace: Temporary directory for test files. 
+ """ + file1 = temp_workspace / "loader1.sql" + file2 = temp_workspace / "loader2.sql" + + file1.write_text(""" -- name: loader1_query SELECT 'from loader 1' as source; """) - file2.write_text(""" + file2.write_text(""" -- name: loader2_query SELECT 'from loader 2' as source; """) - loader1 = SQLFileLoader() - loader2 = SQLFileLoader() + loader1 = SQLFileLoader() + loader2 = SQLFileLoader() - loader1.load_sql(file1) - loader2.load_sql(file2) + loader1.load_sql(file1) + loader2.load_sql(file2) - queries1 = loader1.list_queries() - queries2 = loader2.list_queries() + queries1 = loader1.list_queries() + queries2 = loader2.list_queries() - assert "loader1_query" in queries1 - assert "loader1_query" not in queries2 + assert "loader1_query" in queries1 + assert "loader1_query" not in queries2 - assert "loader2_query" in queries2 - assert "loader2_query" not in queries1 + assert "loader2_query" in queries2 + assert "loader2_query" not in queries1 -class TestFileSystemCacheIntegration: - """Test file system integration with cache system.""" +def test_file_cache_persistence_across_loaders(temp_workspace: Path) -> None: + """Test that file cache persists across different loader instances. - @pytest.fixture - def temp_workspace(self) -> Generator[Path, None, None]: - """Create a temporary workspace for cache integration testing.""" - with tempfile.TemporaryDirectory() as temp_dir: - workspace = Path(temp_dir) - yield workspace + Args: + temp_workspace: Temporary directory for test files. + """ - def test_file_cache_persistence_across_loaders(self, temp_workspace: Path) -> None: - """Test that file cache persists across different loader instances.""" - sql_file = temp_workspace / "cached.sql" - sql_file.write_text(""" + sql_file = temp_workspace / "cached.sql" + sql_file.write_text(""" -- name: cached_query SELECT 'cached content' as status; """) - loader1 = SQLFileLoader() - loader1.load_sql(sql_file) + loader1 = SQLFileLoader() + loader1.load_sql(sql_file) - loader2 = SQLFileLoader() + loader2 = SQLFileLoader() - with patch("sqlspec.loader.get_cache_config") as mock_config: - mock_cache_config = Mock() - mock_cache_config.compiled_cache_enabled = True - mock_config.return_value = mock_cache_config + with patch("sqlspec.loader.get_cache_config") as mock_config: + mock_cache_config = Mock() + mock_cache_config.compiled_cache_enabled = True + mock_config.return_value = mock_cache_config - start_time = time.time() - loader2.load_sql(sql_file) - end_time = time.time() + start_time = time.time() + loader2.load_sql(sql_file) + end_time = time.time() - cache_load_time = end_time - start_time + cache_load_time = end_time - start_time - assert "cached_query" in loader2.list_queries() + assert "cached_query" in loader2.list_queries() - assert cache_load_time < 1.0 + assert cache_load_time < 1.0 - def test_cache_invalidation_on_file_change(self, temp_workspace: Path) -> None: - """Test cache invalidation when files change.""" - sql_file = temp_workspace / "changing.sql" - original_content = """ +def test_cache_invalidation_on_file_change(temp_workspace: Path) -> None: + """Test cache invalidation when files change. + + Args: + temp_workspace: Temporary directory for test files. 
+ """ + + sql_file = temp_workspace / "changing.sql" + + original_content = """ -- name: changing_query_v1 SELECT 'version 1' as version; """ - sql_file.write_text(original_content) + sql_file.write_text(original_content) - with patch("sqlspec.loader.get_cache_config") as mock_config: - mock_cache_config = Mock() - mock_cache_config.compiled_cache_enabled = True - mock_config.return_value = mock_cache_config + with patch("sqlspec.loader.get_cache_config") as mock_config: + mock_cache_config = Mock() + mock_cache_config.compiled_cache_enabled = True + mock_config.return_value = mock_cache_config - loader = SQLFileLoader() - loader.load_sql(sql_file) + loader = SQLFileLoader() + loader.load_sql(sql_file) - assert "changing_query_v1" in loader.list_queries() + assert "changing_query_v1" in loader.list_queries() - modified_content = """ + modified_content = """ -- name: changing_query_v2 SELECT 'version 2' as version; """ - time.sleep(0.1) - sql_file.write_text(modified_content) + time.sleep(0.1) + sql_file.write_text(modified_content) + + loader.clear_cache() + loader.load_sql(sql_file) + + queries = loader.list_queries() + assert "changing_query_v2" in queries + assert "changing_query_v1" not in queries - loader.clear_cache() - loader.load_sql(sql_file) - queries = loader.list_queries() - assert "changing_query_v2" in queries - assert "changing_query_v1" not in queries +def test_cache_behavior_with_file_deletion(temp_workspace: Path) -> None: + """Test cache behavior when cached files are deleted. - def test_cache_behavior_with_file_deletion(self, temp_workspace: Path) -> None: - """Test cache behavior when cached files are deleted.""" - sql_file = temp_workspace / "deletable.sql" - sql_file.write_text(""" + Args: + temp_workspace: Temporary directory for test files. + + Raises: + SQLFileNotFoundError: When attempting to load deleted file. + """ + sql_file = temp_workspace / "deletable.sql" + sql_file.write_text(""" -- name: deletable_query SELECT 'will be deleted' as status; """) - loader = SQLFileLoader() - loader.load_sql(sql_file) - - assert "deletable_query" in loader.list_queries() + loader = SQLFileLoader() + loader.load_sql(sql_file) - sql_file.unlink() + assert "deletable_query" in loader.list_queries() - loader2 = SQLFileLoader() + sql_file.unlink() - with pytest.raises(SQLFileNotFoundError): - loader2.load_sql(sql_file) + loader2 = SQLFileLoader() - assert "deletable_query" in loader.list_queries() + with pytest.raises(SQLFileNotFoundError): + loader2.load_sql(sql_file) + assert "deletable_query" in loader.list_queries() -class TestFileSystemUnicodeAndSpecialCharacters: - """Test file system handling of Unicode and special characters.""" - @pytest.fixture - def temp_workspace(self) -> Generator[Path, None, None]: - """Create a temporary workspace for Unicode testing.""" - with tempfile.TemporaryDirectory() as temp_dir: - workspace = Path(temp_dir) - yield workspace +def test_unicode_file_names(temp_workspace: Path) -> None: + """Test handling of Unicode file names. - def test_unicode_file_names(self, temp_workspace: Path) -> None: - """Test handling of Unicode file names.""" - try: - unicode_file = temp_workspace / "测试_файл_test.sql" - unicode_file.write_text( - """ + Args: + temp_workspace: Temporary directory for test files. 
+ """ + try: + unicode_file = temp_workspace / "测试_файл_test.sql" + unicode_file.write_text( + """ -- name: unicode_filename_query SELECT 'Unicode filename works' as message; """, - encoding="utf-8", - ) - except OSError: - pytest.skip("Unicode filenames not supported on this system") + encoding="utf-8", + ) + except OSError: + pytest.skip("Unicode filenames not supported on this system") - loader = SQLFileLoader() - loader.load_sql(unicode_file) + loader = SQLFileLoader() + loader.load_sql(unicode_file) + + queries = loader.list_queries() + assert "unicode_filename_query" in queries - queries = loader.list_queries() - assert "unicode_filename_query" in queries - def test_unicode_file_content(self, temp_workspace: Path) -> None: - """Test handling of Unicode content in files.""" - unicode_file = temp_workspace / "unicode_content.sql" +def test_unicode_file_content(temp_workspace: Path) -> None: + """Test handling of Unicode content in files. - unicode_content = """ + Args: + temp_workspace: Temporary directory for test files. + """ + unicode_file = temp_workspace / "unicode_content.sql" + + unicode_content = """ -- name: unicode_content_query -- Unicode comment: 这是一个测试 файл на русском עברית SELECT 'Unicode: 测试 тест עברית' as multilingual_message, 'Symbols: ★ ♥ ⚡ ✓' as symbols, 'Math: ∑ ∆ π ∞' as math_symbols; """ - unicode_file.write_text(unicode_content, encoding="utf-8") + unicode_file.write_text(unicode_content, encoding="utf-8") - loader = SQLFileLoader(encoding="utf-8") - loader.load_sql(unicode_file) + loader = SQLFileLoader(encoding="utf-8") + loader.load_sql(unicode_file) - queries = loader.list_queries() - assert "unicode_content_query" in queries + queries = loader.list_queries() + assert "unicode_content_query" in queries - sql = loader.get_sql("unicode_content_query") - assert "Unicode: 测试 тест עברית" in sql.sql + sql = loader.get_sql("unicode_content_query") + assert "Unicode: 测试 тест עברית" in sql.sql - def test_mixed_encoding_handling(self, temp_workspace: Path) -> None: - """Test handling of different encodings.""" - utf8_file = temp_workspace / "utf8.sql" - utf8_file.write_text( - """ +def test_mixed_encoding_handling(temp_workspace: Path) -> None: + """Test handling of different encodings. + + Args: + temp_workspace: Temporary directory for test files. 
+ """ + utf8_file = temp_workspace / "utf8.sql" + utf8_file.write_text( + """ -- name: utf8_query SELECT 'UTF-8: 测试' as message; """, - encoding="utf-8", - ) + encoding="utf-8", + ) - latin1_file = temp_workspace / "latin1.sql" - latin1_content = """ + latin1_file = temp_workspace / "latin1.sql" + latin1_content = """ -- name: latin1_query SELECT 'Latin-1: café' as message; """ - latin1_file.write_text(latin1_content, encoding="latin-1") + latin1_file.write_text(latin1_content, encoding="latin-1") - utf8_loader = SQLFileLoader(encoding="utf-8") - utf8_loader.load_sql(utf8_file) + utf8_loader = SQLFileLoader(encoding="utf-8") + utf8_loader.load_sql(utf8_file) - assert "utf8_query" in utf8_loader.list_queries() + assert "utf8_query" in utf8_loader.list_queries() - latin1_loader = SQLFileLoader(encoding="latin-1") - latin1_loader.load_sql(latin1_file) + latin1_loader = SQLFileLoader(encoding="latin-1") + latin1_loader.load_sql(latin1_file) - assert "latin1_query" in latin1_loader.list_queries() + assert "latin1_query" in latin1_loader.list_queries() - def test_special_characters_in_paths(self, temp_workspace: Path) -> None: - """Test handling of special characters in file paths.""" - try: - special_dir = temp_workspace / "special-chars_&_symbols!@#$" - special_dir.mkdir() +def test_special_characters_in_paths(temp_workspace: Path) -> None: + """Test handling of special characters in file paths. - special_file = special_dir / "query-file_with&symbols.sql" - special_file.write_text(""" + Args: + temp_workspace: Temporary directory for test files. + """ + try: + special_dir = temp_workspace / "special-chars_&_symbols!@#$" + special_dir.mkdir() + + special_file = special_dir / "query-file_with&symbols.sql" + special_file.write_text(""" -- name: special_path_query SELECT 'Special path works' as result; """) - except OSError: - pytest.skip("Special characters in paths not supported on this system") + except OSError: + pytest.skip("Special characters in paths not supported on this system") - loader = SQLFileLoader() - loader.load_sql(special_file) + loader = SQLFileLoader() + loader.load_sql(special_file) - queries = loader.list_queries() - assert "special_path_query" in queries + queries = loader.list_queries() + assert "special_path_query" in queries diff --git a/tests/integration/test_storage/__init__.py b/tests/integration/test_storage/__init__.py new file mode 100644 index 000000000..aaed48ffb --- /dev/null +++ b/tests/integration/test_storage/__init__.py @@ -0,0 +1 @@ +"""Integration tests for storage backends.""" diff --git a/tests/integration/test_storage/test_storage_integration.py b/tests/integration/test_storage/test_storage_integration.py new file mode 100644 index 000000000..c8a4a438e --- /dev/null +++ b/tests/integration/test_storage/test_storage_integration.py @@ -0,0 +1,866 @@ +"""Integration tests for storage backends using minio fixtures. + +Tests storage backend operations against S3-compatible storage using pytest-databases minio fixtures. +Follows Advanced Alchemy patterns for comprehensive storage testing. +""" + +from pathlib import Path +from typing import TYPE_CHECKING + +import pytest +from minio import Minio +from pytest_databases.docker.minio import MinioService + +from sqlspec.protocols import ObjectStoreProtocol +from sqlspec.storage.registry import storage_registry +from sqlspec.typing import FSSPEC_INSTALLED, OBSTORE_INSTALLED, PYARROW_INSTALLED + +if TYPE_CHECKING: + pass + +# Test data +TEST_TEXT_CONTENT = "Hello, SQLSpec storage integration test!" 
+TEST_BINARY_CONTENT = ( + b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x01\x00\x00\x00\x01\x08\x02\x00\x00\x00\x90wS\xde" +) + + +@pytest.fixture +def local_test_setup(tmp_path: Path) -> Path: + """Create test directory with sample files.""" + test_dir = tmp_path / "storage_test" + test_dir.mkdir() + + # Create sample files + (test_dir / "test.txt").write_text(TEST_TEXT_CONTENT) + (test_dir / "test.bin").write_bytes(TEST_BINARY_CONTENT) + + # Create subdirectory structure + subdir = test_dir / "subdir" + subdir.mkdir() + (subdir / "nested.txt").write_text("Nested file content") + + return test_dir + + +@pytest.fixture +def fsspec_s3_backend(minio_service: "MinioService", minio_default_bucket_name: str) -> "ObjectStoreProtocol": + """Set up FSSpec S3 backend for testing.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + return FSSpecBackend( + uri=f"s3://{minio_default_bucket_name}", + endpoint_url=f"http://{minio_service.endpoint}", + key=minio_service.access_key, + secret=minio_service.secret_key, + use_ssl=False, + client_kwargs={"verify": False, "use_ssl": False}, + ) + + +@pytest.fixture +def obstore_s3_backend(minio_service: "MinioService", minio_default_bucket_name: str) -> "ObjectStoreProtocol": + """Set up ObStore S3 backend for testing.""" + from sqlspec.storage.backends.obstore import ObStoreBackend + + s3_uri = f"s3://{minio_default_bucket_name}" + return ObStoreBackend( + s3_uri, + aws_endpoint=f"http://{minio_service.endpoint}", + aws_access_key_id=minio_service.access_key, + aws_secret_access_key=minio_service.secret_key, + aws_virtual_hosted_style_request=False, + client_options={"allow_http": True}, + ) + + +# Local storage tests + + +@pytest.mark.xdist_group("storage") +def test_local_store_file_operations(local_test_setup: Path) -> None: + """Test LocalStore basic file operations.""" + from sqlspec.storage.backends.local import LocalStore + + store = LocalStore(str(local_test_setup)) + + # Test exists + assert store.exists("test.txt") + assert not store.exists("nonexistent.txt") + + # Test read operations + text_content = store.read_text("test.txt") + assert text_content == TEST_TEXT_CONTENT + + binary_content = store.read_bytes("test.bin") + assert binary_content == TEST_BINARY_CONTENT + + +@pytest.mark.xdist_group("storage") +def test_local_store_write_operations(local_test_setup: Path) -> None: + """Test LocalStore write operations.""" + from sqlspec.storage.backends.local import LocalStore + + store = LocalStore(str(local_test_setup)) + + # Test write text + new_text = "New text content" + store.write_text("new.txt", new_text) + assert store.read_text("new.txt") == new_text + + # Test write bytes + new_bytes = b"New binary content" + store.write_bytes("new.bin", new_bytes) + assert store.read_bytes("new.bin") == new_bytes + + +@pytest.mark.xdist_group("storage") +def test_local_store_listing_operations(local_test_setup: Path) -> None: + """Test LocalStore listing operations.""" + from sqlspec.storage.backends.local import LocalStore + + store = LocalStore(str(local_test_setup)) + + # Test list_objects + objects = store.list_objects() + assert "test.txt" in objects + assert "test.bin" in objects + assert "subdir/nested.txt" in objects + + +@pytest.mark.xdist_group("storage") +def test_local_store_url_signing(local_test_setup: Path) -> None: + """Test LocalStore URL signing functionality.""" + from sqlspec.storage.backends.local import LocalStore + + store = LocalStore(str(local_test_setup)) + + # Test sign method + signed_url = 
store.sign("test.txt", expires_in=3600) + assert signed_url.startswith("file://") + assert "test.txt" in signed_url + + +@pytest.mark.xdist_group("storage") +async def test_local_store_async_operations(local_test_setup: Path) -> None: + """Test LocalStore async operations.""" + from sqlspec.storage.backends.local import LocalStore + + store = LocalStore(str(local_test_setup)) + + # Test async exists + exists = await store.exists_async("test.txt") + assert exists + + # Test async read operations + text_content = await store.read_text_async("test.txt") + assert text_content == TEST_TEXT_CONTENT + + binary_content = await store.read_bytes_async("test.bin") + assert binary_content == TEST_BINARY_CONTENT + + # Test async write operations + new_text = "Async new text content" + await store.write_text_async("async_new.txt", new_text) + assert await store.read_text_async("async_new.txt") == new_text + + +# FSSpec S3 backend tests + + +@pytest.mark.xdist_group("storage") +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +def test_fsspec_s3_basic_operations( + fsspec_s3_backend: "ObjectStoreProtocol", minio_client: "Minio", minio_default_bucket_name: str +) -> None: + """Test FSSpec S3 backend basic operations.""" + # Ensure bucket exists (following Advanced Alchemy pattern) + assert minio_client.bucket_exists(minio_default_bucket_name), f"Bucket {minio_default_bucket_name} does not exist" + + # Test write and read text + test_path = "integration_test/test.txt" + fsspec_s3_backend.write_text(test_path, TEST_TEXT_CONTENT) + + content = fsspec_s3_backend.read_text(test_path) + assert content == TEST_TEXT_CONTENT + + # Test exists + assert fsspec_s3_backend.exists(test_path) + assert not fsspec_s3_backend.exists("nonexistent.txt") + + +@pytest.mark.xdist_group("storage") +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +def test_fsspec_s3_binary_operations(fsspec_s3_backend: "ObjectStoreProtocol") -> None: + """Test FSSpec S3 backend binary operations.""" + test_path = "integration_test/binary.bin" + fsspec_s3_backend.write_bytes(test_path, TEST_BINARY_CONTENT) + + content = fsspec_s3_backend.read_bytes(test_path) + assert content == TEST_BINARY_CONTENT + + +@pytest.mark.xdist_group("storage") +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +async def test_fsspec_s3_async_operations(fsspec_s3_backend: "ObjectStoreProtocol") -> None: + """Test FSSpec S3 backend async operations.""" + test_path = "integration_test/async_test.txt" + + # Test async operations + await fsspec_s3_backend.write_text_async(test_path, TEST_TEXT_CONTENT) + content = await fsspec_s3_backend.read_text_async(test_path) + assert content == TEST_TEXT_CONTENT + + # Test async exists + exists = await fsspec_s3_backend.exists_async(test_path) + assert exists + + +@pytest.mark.xdist_group("storage") +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +def test_fsspec_s3_listing_operations(fsspec_s3_backend: "ObjectStoreProtocol") -> None: + """Test FSSpec S3 backend listing operations.""" + # Write multiple test files + test_files = ["list_test/file1.txt", "list_test/file2.txt", "list_test/subdir/file3.txt"] + for file_path in test_files: + fsspec_s3_backend.write_text(file_path, f"Content of {file_path}") + + # Test list_objects + objects = fsspec_s3_backend.list_objects("list_test/") + assert len(objects) >= 3 + assert any("file1.txt" in obj for obj in objects) + assert any("file2.txt" in obj for obj in objects) + assert any("file3.txt" in 
obj for obj in objects) + + +@pytest.mark.xdist_group("storage") +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +def test_fsspec_s3_copy_move_operations(fsspec_s3_backend: "ObjectStoreProtocol") -> None: + """Test FSSpec S3 backend copy and move operations.""" + # Setup source file + source_path = "copy_test/source.txt" + copy_path = "copy_test/copy.txt" + move_source_path = "move_test/source.txt" + move_dest_path = "move_test/moved.txt" + + fsspec_s3_backend.write_text(source_path, TEST_TEXT_CONTENT) + fsspec_s3_backend.write_text(move_source_path, TEST_TEXT_CONTENT) + + # Test copy + fsspec_s3_backend.copy(source_path, copy_path) + assert fsspec_s3_backend.exists(source_path) # Original should still exist + assert fsspec_s3_backend.exists(copy_path) + assert fsspec_s3_backend.read_text(copy_path) == TEST_TEXT_CONTENT + + # Test move + fsspec_s3_backend.move(move_source_path, move_dest_path) + assert not fsspec_s3_backend.exists(move_source_path) # Original should be gone + assert fsspec_s3_backend.exists(move_dest_path) + assert fsspec_s3_backend.read_text(move_dest_path) == TEST_TEXT_CONTENT + + +# ObStore S3 backend tests + + +@pytest.mark.xdist_group("storage") +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +def test_obstore_s3_basic_operations( + obstore_s3_backend: "ObjectStoreProtocol", minio_client: "Minio", minio_default_bucket_name: str +) -> None: + """Test ObStore S3 backend basic operations.""" + # Ensure bucket exists (following Advanced Alchemy pattern) + assert minio_client.bucket_exists(minio_default_bucket_name), f"Bucket {minio_default_bucket_name} does not exist" + + test_path = "integration_test/obstore_test.txt" + + # Test write and read + obstore_s3_backend.write_text(test_path, TEST_TEXT_CONTENT) + content = obstore_s3_backend.read_text(test_path) + assert content == TEST_TEXT_CONTENT + + # Test exists + assert obstore_s3_backend.exists(test_path) + + +@pytest.mark.xdist_group("storage") +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +def test_obstore_s3_binary_operations(obstore_s3_backend: "ObjectStoreProtocol") -> None: + """Test ObStore S3 backend binary operations.""" + test_path = "integration_test/obstore_binary.bin" + + obstore_s3_backend.write_bytes(test_path, TEST_BINARY_CONTENT) + content = obstore_s3_backend.read_bytes(test_path) + assert content == TEST_BINARY_CONTENT + + +@pytest.mark.xdist_group("storage") +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +async def test_obstore_s3_async_operations(obstore_s3_backend: "ObjectStoreProtocol") -> None: + """Test ObStore S3 backend async operations.""" + test_path = "integration_test/obstore_async.txt" + + # Test async operations + await obstore_s3_backend.write_text_async(test_path, TEST_TEXT_CONTENT) + content = await obstore_s3_backend.read_text_async(test_path) + assert content == TEST_TEXT_CONTENT + + exists = await obstore_s3_backend.exists_async(test_path) + assert exists + + +@pytest.mark.xdist_group("storage") +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +def test_obstore_s3_listing_operations(obstore_s3_backend: "ObjectStoreProtocol") -> None: + """Test ObStore S3 backend listing operations.""" + # Write test files in different paths + test_files = ["obstore_list/file1.txt", "obstore_list/file2.txt", "obstore_list/subdir/file3.txt"] + for file_path in test_files: + obstore_s3_backend.write_text(file_path, f"ObStore content of {file_path}") + + # Test 
list_objects + objects = obstore_s3_backend.list_objects("obstore_list/") + assert len(objects) >= 3 + assert any("file1.txt" in obj for obj in objects) + assert any("file2.txt" in obj for obj in objects) + + +# Storage registry tests + + +@pytest.mark.xdist_group("storage") +def test_registry_uri_resolution_local(tmp_path: Path) -> None: + """Test storage registry URI resolution for local files.""" + from sqlspec.storage.backends.local import LocalStore + + # Test file URI resolution + test_file = tmp_path / "registry_test.txt" + test_file.write_text(TEST_TEXT_CONTENT) + + # Test file:// URI + file_uri = f"file://{test_file}" + backend = storage_registry.get(file_uri) + assert isinstance(backend, LocalStore) + + content = backend.read_text("registry_test.txt") + assert content == TEST_TEXT_CONTENT + + +@pytest.mark.xdist_group("storage") +def test_registry_path_resolution(tmp_path: Path) -> None: + """Test storage registry resolution for raw paths.""" + from sqlspec.storage.backends.local import LocalStore + + # Test Path object resolution + test_file = tmp_path / "path_test.txt" + test_file.write_text(TEST_TEXT_CONTENT) + + backend = storage_registry.get(tmp_path) + assert isinstance(backend, LocalStore) + + content = backend.read_text("path_test.txt") + assert content == TEST_TEXT_CONTENT + + +@pytest.mark.xdist_group("storage") +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +def test_registry_s3_fsspec_resolution(minio_service: "MinioService", minio_default_bucket_name: str) -> None: + """Test storage registry S3 resolution with FSSpec backend.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + s3_uri = f"s3://{minio_default_bucket_name}/registry_test" + + backend = storage_registry.get( + s3_uri, + backend="fsspec", + endpoint_url=f"http://{minio_service.endpoint}", + key=minio_service.access_key, + secret=minio_service.secret_key, + use_ssl=False, + client_kwargs={"verify": False, "use_ssl": False}, + ) + + # Should get FSSpec backend for S3 + assert isinstance(backend, FSSpecBackend) + + # Test basic operations + test_path = "registry_fsspec_test.txt" + backend.write_text(test_path, TEST_TEXT_CONTENT) + content = backend.read_text(test_path) + assert content == TEST_TEXT_CONTENT + + +@pytest.mark.xdist_group("storage") +def test_registry_alias_registration( + minio_service: "MinioService", minio_default_bucket_name: str, tmp_path: Path +) -> None: + """Test storage registry alias registration and usage.""" + from sqlspec.storage.backends.local import LocalStore + + # Clear registry to avoid test interference + storage_registry.clear() + + try: + # Register local alias + storage_registry.register_alias("test-local", uri=f"file://{tmp_path / 'test_data'}") + + # Test local alias + backend = storage_registry.get("test-local") + assert isinstance(backend, LocalStore) + + # Create test data + backend.write_text("alias_test.txt", TEST_TEXT_CONTENT) + content = backend.read_text("alias_test.txt") + assert content == TEST_TEXT_CONTENT + + # Register S3 alias if fsspec available + if FSSPEC_INSTALLED: + from sqlspec.storage.backends.fsspec import FSSpecBackend + + storage_registry.register_alias( + "test-s3", + uri=f"s3://{minio_default_bucket_name}", + backend="fsspec", + endpoint_url=f"http://{minio_service.endpoint}", + key=minio_service.access_key, + secret=minio_service.secret_key, + use_ssl=False, + client_kwargs={"verify": False, "use_ssl": False}, + ) + + s3_backend = storage_registry.get("test-s3") + assert isinstance(s3_backend, 
FSSpecBackend) + + # Test S3 alias operations + s3_backend.write_text("s3_alias_test.txt", TEST_TEXT_CONTENT) + s3_content = s3_backend.read_text("s3_alias_test.txt") + assert s3_content == TEST_TEXT_CONTENT + + finally: + # Clean up registry + storage_registry.clear() + + +# Backend comparison tests + + +@pytest.fixture +def local_backend(tmp_path: Path) -> "ObjectStoreProtocol": + """Create LocalStore backend.""" + from sqlspec.storage.backends.local import LocalStore + + return LocalStore(str(tmp_path)) + + +@pytest.fixture +def fsspec_s3_backend_optional(minio_service: "MinioService", minio_default_bucket_name: str) -> "ObjectStoreProtocol": + """Create FSSpec S3 backend if available.""" + if not FSSPEC_INSTALLED: + pytest.skip("fsspec not installed") + + from sqlspec.storage.backends.fsspec import FSSpecBackend + + return FSSpecBackend.from_config( + { + "protocol": "s3", + "fs_config": { + "endpoint_url": f"http://{minio_service.host}:{minio_service.port}", + "key": minio_service.access_key, + "secret": minio_service.secret_key, + }, + "base_path": minio_default_bucket_name, + } + ) + + +@pytest.fixture +def obstore_s3_backend_optional(minio_service: "MinioService", minio_default_bucket_name: str) -> "ObjectStoreProtocol": + """Create ObStore S3 backend if available.""" + if not OBSTORE_INSTALLED: + pytest.skip("obstore not installed") + + from sqlspec.storage.backends.obstore import ObStoreBackend + + s3_uri = f"s3://{minio_default_bucket_name}" + return ObStoreBackend( + s3_uri, + aws_endpoint=f"http://{minio_service.endpoint}", + aws_access_key_id=minio_service.access_key, + aws_secret_access_key=minio_service.secret_key, + aws_virtual_hosted_style_request=False, + client_options={"allow_http": True}, + ) + + +@pytest.mark.xdist_group("storage") +@pytest.mark.parametrize("backend_name", ["local_backend", "fsspec_s3_backend_optional", "obstore_s3_backend_optional"]) +def test_backend_consistency(request: pytest.FixtureRequest, backend_name: str) -> None: + """Test that all backends provide consistent behavior.""" + backend = request.getfixturevalue(backend_name) + if backend is None: + pytest.skip(f"Backend {backend_name} not available") + + test_path = f"consistency_test_{backend_name}.txt" + + # Test write/read consistency + backend.write_text(test_path, TEST_TEXT_CONTENT) + content = backend.read_text(test_path) + assert content == TEST_TEXT_CONTENT + + # Test exists consistency + assert backend.exists(test_path) + + # Test URL signing consistency (all should return some form of URL) + signed_url = backend.sign(test_path, expires_in=3600) + assert isinstance(signed_url, str) + assert len(signed_url) > 0 + + +@pytest.mark.xdist_group("storage") +@pytest.mark.parametrize("backend_name", ["local_backend", "fsspec_s3_backend_optional", "obstore_s3_backend_optional"]) +async def test_backend_async_consistency(request: pytest.FixtureRequest, backend_name: str) -> None: + """Test that all backends provide consistent async behavior.""" + backend = request.getfixturevalue(backend_name) + if backend is None: + pytest.skip(f"Backend {backend_name} not available") + + test_path = f"async_consistency_{backend_name}.txt" + + # Test async write/read consistency + await backend.write_text_async(test_path, TEST_TEXT_CONTENT) + content = await backend.read_text_async(test_path) + assert content == TEST_TEXT_CONTENT + + # Test async exists consistency + exists = await backend.exists_async(test_path) + assert exists + + +# Error handling tests + + +@pytest.mark.xdist_group("storage") +def 
test_local_backend_error_handling(tmp_path: Path) -> None: + """Test LocalStore error handling for invalid operations.""" + from sqlspec.storage.backends.local import LocalStore + + backend = LocalStore(str(tmp_path)) + + # Test reading nonexistent file + with pytest.raises(FileNotFoundError): + backend.read_text("nonexistent.txt") + + with pytest.raises(FileNotFoundError): + backend.read_bytes("nonexistent.txt") + + +@pytest.mark.xdist_group("storage") +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +def test_fsspec_s3_error_handling(minio_service: "MinioService", minio_default_bucket_name: str) -> None: + """Test FSSpec S3 backend error handling.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + backend = FSSpecBackend.from_config( + { + "protocol": "s3", + "fs_config": { + "endpoint_url": f"http://{minio_service.host}:{minio_service.port}", + "key": minio_service.access_key, + "secret": minio_service.secret_key, + }, + "base_path": minio_default_bucket_name, + } + ) + + # Test reading nonexistent file + with pytest.raises(FileNotFoundError): + backend.read_text("nonexistent.txt") + + +@pytest.mark.xdist_group("storage") +async def test_async_error_handling(tmp_path: Path) -> None: + """Test async error handling.""" + from sqlspec.storage.backends.local import LocalStore + + backend = LocalStore(str(tmp_path)) + + # Test async reading nonexistent file + with pytest.raises(FileNotFoundError): + await backend.read_text_async("nonexistent.txt") + + with pytest.raises(FileNotFoundError): + await backend.read_bytes_async("nonexistent.txt") + + +# Registry advanced tests + + +@pytest.mark.xdist_group("storage") +def test_registry_caching_behavior(tmp_path: Path) -> None: + """Test that storage registry properly caches backend instances.""" + storage_registry.clear() + + try: + uri = f"file://{tmp_path}" + + # Get same URI twice + backend1 = storage_registry.get(uri) + backend2 = storage_registry.get(uri) + + # Should return the same instance (cached) + assert backend1 is backend2 + + # Clear cache and get again + storage_registry.clear_cache(uri) + backend3 = storage_registry.get(uri) + + # Should be different instance after cache clear + assert backend1 is not backend3 + + finally: + storage_registry.clear() + + +@pytest.mark.xdist_group("storage") +def test_registry_alias_management(tmp_path: Path) -> None: + """Test storage registry alias management features.""" + storage_registry.clear() + + try: + # Register alias + alias_name = "test-management" + storage_registry.register_alias(alias_name, uri=f"file://{tmp_path}") + + # Test alias registration check + assert storage_registry.is_alias_registered(alias_name) + assert not storage_registry.is_alias_registered("nonexistent-alias") + + # Test list aliases + aliases = storage_registry.list_aliases() + assert alias_name in aliases + + # Test clearing aliases + storage_registry.clear_aliases() + assert not storage_registry.is_alias_registered(alias_name) + + finally: + storage_registry.clear() + + +@pytest.mark.xdist_group("storage") +def test_registry_backend_fallback_order( + tmp_path: Path, minio_service: "MinioService", minio_default_bucket_name: str +) -> None: + """Test that registry follows correct backend fallback order.""" + from sqlspec.storage.backends.local import LocalStore + + storage_registry.clear() + + try: + # Test local file resolution (should always use LocalStore) + local_uri = f"file://{tmp_path}" + local_backend = storage_registry.get(local_uri) + assert isinstance(local_backend, 
LocalStore) + + # Test S3 resolution (should prefer ObStore > FSSpec if available) + s3_uri = f"s3://{minio_default_bucket_name}" + s3_backend = storage_registry.get( + s3_uri, + endpoint_url=f"http://{minio_service.host}:{minio_service.port}", + aws_access_key_id=minio_service.access_key, + aws_secret_access_key=minio_service.secret_key, + ) + + # Should get ObStore if available, else FSSpec, else error + if OBSTORE_INSTALLED: + from sqlspec.storage.backends.obstore import ObStoreBackend + + assert isinstance(s3_backend, ObStoreBackend) + elif FSSPEC_INSTALLED: + from sqlspec.storage.backends.fsspec import FSSpecBackend + + assert isinstance(s3_backend, FSSpecBackend) + else: + # Should raise MissingDependencyError if no cloud backends available + pass + + finally: + storage_registry.clear() + + +# Arrow integration tests + + +@pytest.mark.xdist_group("storage") +@pytest.mark.skipif(not PYARROW_INSTALLED, reason="PyArrow not installed") +def test_local_arrow_operations(tmp_path: Path) -> None: + """Test LocalStore Arrow operations if pyarrow is available.""" + from sqlspec.storage.backends.local import LocalStore + + backend = LocalStore(str(tmp_path)) + + # Create test Arrow data + import pyarrow as pa + + data = {"col1": [1, 2, 3, 4], "col2": ["a", "b", "c", "d"], "col3": [1.1, 2.2, 3.3, 4.4]} + table = pa.table(data) + + # Test write/read Arrow table + arrow_path = "arrow_test.parquet" + backend.write_arrow(arrow_path, table) + + read_table = backend.read_arrow(arrow_path) + assert read_table.equals(table) + + # Test exists for Arrow file + assert backend.exists(arrow_path) + + +@pytest.mark.xdist_group("storage") +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +@pytest.mark.skipif(not PYARROW_INSTALLED, reason="PyArrow not installed") +def test_fsspec_s3_arrow_operations(minio_service: "MinioService", minio_default_bucket_name: str) -> None: + """Test FSSpec S3 backend Arrow operations if pyarrow is available.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + backend = FSSpecBackend.from_config( + { + "protocol": "s3", + "fs_config": { + "endpoint_url": f"http://{minio_service.host}:{minio_service.port}", + "key": minio_service.access_key, + "secret": minio_service.secret_key, + }, + "base_path": minio_default_bucket_name, + } + ) + + import pyarrow as pa + + # Create test data with different types + data = { + "integers": [1, 2, 3, 4, 5], + "strings": ["hello", "world", "storage", "test", "arrow"], + "floats": [1.1, 2.2, 3.3, 4.4, 5.5], + "booleans": [True, False, True, False, True], + } + table = pa.table(data) + + # Test S3 Arrow operations + s3_arrow_path = "s3_arrow_test.parquet" + backend.write_arrow(s3_arrow_path, table) + + read_table = backend.read_arrow(s3_arrow_path) + assert read_table.equals(table) + + +# Performance tests + + +@pytest.mark.xdist_group("storage") +def test_local_backend_large_file_operations(tmp_path: Path) -> None: + """Test LocalStore with larger file operations.""" + from sqlspec.storage.backends.local import LocalStore + + backend = LocalStore(str(tmp_path)) + + # Create larger test content + large_text = "Large file content line\n" * 1000 + large_binary = b"Binary data chunk" * 1000 + + # Test large text operations + large_text_path = "large_test.txt" + backend.write_text(large_text_path, large_text) + read_content = backend.read_text(large_text_path) + assert read_content == large_text + assert len(read_content) == len(large_text) + + # Test large binary operations + large_binary_path = "large_test.bin" + 
backend.write_bytes(large_binary_path, large_binary) + read_binary = backend.read_bytes(large_binary_path) + assert read_binary == large_binary + assert len(read_binary) == len(large_binary) + + +@pytest.mark.xdist_group("storage") +async def test_concurrent_storage_operations(tmp_path: Path) -> None: + """Test concurrent storage operations.""" + import asyncio + + from sqlspec.storage.backends.local import LocalStore + + backend = LocalStore(str(tmp_path)) + + async def write_test_file(index: int) -> None: + """Write a test file asynchronously.""" + path = f"concurrent_test_{index}.txt" + content = f"Concurrent test content {index}" + await backend.write_text_async(path, content) + + # Verify the write + read_content = await backend.read_text_async(path) + assert read_content == content + + # Run multiple concurrent writes + tasks = [write_test_file(i) for i in range(10)] + await asyncio.gather(*tasks) + + # Verify all files exist + for i in range(10): + assert backend.exists(f"concurrent_test_{i}.txt") + + +# Metadata tests + + +@pytest.mark.xdist_group("storage") +def test_local_metadata_operations(tmp_path: Path) -> None: + """Test LocalStore metadata retrieval.""" + from sqlspec.storage.backends.local import LocalStore + + backend = LocalStore(str(tmp_path)) + + # Create test file + test_path = "metadata_test.txt" + backend.write_text(test_path, TEST_TEXT_CONTENT) + + # Test metadata retrieval + metadata = backend.get_metadata(test_path) + assert metadata is not None + assert "size" in metadata + assert metadata["size"] == len(TEST_TEXT_CONTENT.encode()) + + # Test metadata for binary file + binary_path = "metadata_binary.bin" + backend.write_bytes(binary_path, TEST_BINARY_CONTENT) + + binary_metadata = backend.get_metadata(binary_path) + assert binary_metadata is not None + assert binary_metadata["size"] == len(TEST_BINARY_CONTENT) + + +@pytest.mark.xdist_group("storage") +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +def test_fsspec_s3_metadata_operations(minio_service: "MinioService", minio_default_bucket_name: str) -> None: + """Test FSSpec S3 backend metadata operations.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + backend = FSSpecBackend.from_config( + { + "protocol": "s3", + "fs_config": { + "endpoint_url": f"http://{minio_service.host}:{minio_service.port}", + "key": minio_service.access_key, + "secret": minio_service.secret_key, + }, + "base_path": minio_default_bucket_name, + } + ) + + # Test S3 metadata + test_path = "s3_metadata_test.txt" + backend.write_text(test_path, TEST_TEXT_CONTENT) + + metadata = backend.get_metadata(test_path) + assert metadata is not None + assert "size" in metadata diff --git a/tests/unit/test_loader/test_loading_patterns.py b/tests/unit/test_loader/test_loading_patterns.py index 4fb5d3690..61ad06048 100644 --- a/tests/unit/test_loader/test_loading_patterns.py +++ b/tests/unit/test_loader/test_loading_patterns.py @@ -623,186 +623,201 @@ def test_unicode_filename_handling() -> None: assert "unicode_filename_query" in queries -class TestFixturePerformanceTests: - """Performance tests using real fixture files.""" - - @pytest.fixture - def fixtures_path(self) -> Path: - """Get path to test fixtures directory.""" - return Path(__file__).parent.parent.parent / "fixtures" - - def test_large_fixture_loading_performance(self, fixtures_path: Path) -> None: - """Test performance loading large fixture files.""" - import time - - large_fixtures = [ - "postgres/collection-database_details.sql", - 
"postgres/collection-table_details.sql", - "postgres/collection-schema_details.sql", - "mysql/collection-database_details.sql", - "mysql/collection-table_details.sql", - ] +@pytest.fixture +def fixtures_path() -> Path: + """Get path to test fixtures directory.""" + return Path(__file__).parent.parent.parent / "fixtures" - performance_results = {} - for fixture_path in large_fixtures: - fixture_file = fixtures_path / fixture_path - if not fixture_file.exists(): - continue +def test_large_fixture_loading_performance(fixtures_path: Path) -> None: + """Test performance loading large fixture files.""" + import time - loader = SQLFileLoader() + from sqlspec.loader import SQLFileLoader - start_time = time.time() - loader.load_sql(fixture_file) - load_time = time.time() - start_time + large_fixtures = [ + "postgres/collection-database_details.sql", + "postgres/collection-table_details.sql", + "postgres/collection-schema_details.sql", + "mysql/collection-database_details.sql", + "mysql/collection-table_details.sql", + ] - queries = loader.list_queries() - performance_results[fixture_path] = { - "load_time": load_time, - "query_count": len(queries), - "file_size": fixture_file.stat().st_size, - } + performance_results = {} - assert load_time < 2.0, f"Loading {fixture_path} took too long: {load_time:.3f}s" - assert len(queries) > 0, f"No queries loaded from {fixture_path}" + for fixture_path in large_fixtures: + fixture_file = fixtures_path / fixture_path + if not fixture_file.exists(): + continue - if queries: - test_query = queries[0] - sql_start = time.time() - sql_obj = loader.get_sql(test_query) - sql_time = time.time() - sql_start + loader = SQLFileLoader() - assert sql_time < 0.1, f"SQL object creation too slow: {sql_time:.3f}s" - assert isinstance(sql_obj, SQL) + start_time = time.time() + loader.load_sql(fixture_file) + load_time = time.time() - start_time - def test_multiple_fixture_batch_loading(self, fixtures_path: Path) -> None: - """Test performance when loading multiple fixture files at once.""" - import time + queries = loader.list_queries() + performance_results[fixture_path] = { + "load_time": load_time, + "query_count": len(queries), + "file_size": fixture_file.stat().st_size, + } - fixture_files = [ - fixtures_path / "init.sql", - fixtures_path / "postgres" / "collection-extensions.sql", - fixtures_path / "mysql" / "collection-engines.sql", - fixtures_path / "postgres" / "collection-privileges.sql", - ] + assert load_time < 2.0, f"Loading {fixture_path} took too long: {load_time:.3f}s" + assert len(queries) > 0, f"No queries loaded from {fixture_path}" - existing_files = [f for f in fixture_files if f.exists()] - if len(existing_files) < 2: - pytest.skip("Need at least 2 fixture files for batch loading test") + if queries: + test_query = queries[0] + sql_start = time.time() + sql_obj = loader.get_sql(test_query) + sql_time = time.time() - sql_start - loader = SQLFileLoader() + assert sql_time < 0.1, f"SQL object creation too slow: {sql_time:.3f}s" + assert isinstance(sql_obj, SQL) - start_time = time.time() - loader.load_sql(*existing_files) - total_load_time = time.time() - start_time - all_queries = loader.list_queries() - assert len(all_queries) > 0 +def test_multiple_fixture_batch_loading(fixtures_path: Path) -> None: + """Test performance when loading multiple fixture files at once.""" + import time - assert total_load_time < 3.0, f"Batch loading took too long: {total_load_time:.3f}s" + from sqlspec.loader import SQLFileLoader - loaded_files = loader.list_files() - for 
fixture_file in existing_files: - assert str(fixture_file) in loaded_files + fixture_files = [ + fixtures_path / "init.sql", + fixtures_path / "postgres" / "collection-extensions.sql", + fixtures_path / "mysql" / "collection-engines.sql", + fixtures_path / "postgres" / "collection-privileges.sql", + ] - def test_fixture_directory_scanning_performance(self, fixtures_path: Path) -> None: - """Test performance when scanning fixture directories.""" - import time + existing_files = [f for f in fixture_files if f.exists()] + if len(existing_files) < 2: + pytest.skip("Need at least 2 fixture files for batch loading test") - test_dirs = [fixtures_path / "postgres", fixtures_path / "mysql"] + loader = SQLFileLoader() - for test_dir in test_dirs: - if not test_dir.exists(): - continue + start_time = time.time() + loader.load_sql(*existing_files) + total_load_time = time.time() - start_time - loader = SQLFileLoader() + all_queries = loader.list_queries() + assert len(all_queries) > 0 - start_time = time.time() - loader.load_sql(test_dir) - scan_time = time.time() - start_time + assert total_load_time < 3.0, f"Batch loading took too long: {total_load_time:.3f}s" - queries = loader.list_queries() - files = loader.list_files() + loaded_files = loader.list_files() + for fixture_file in existing_files: + assert str(fixture_file) in loaded_files - assert scan_time < 5.0, f"Directory scanning took too long: {scan_time:.3f}s" - assert len(queries) > 0, f"No queries found in {test_dir}" - assert len(files) > 0, f"No files loaded from {test_dir}" - if test_dir.name in ["postgres", "mysql"]: - assert len(queries) > 0, f"No queries found in {test_dir}" +def test_fixture_directory_scanning_performance(fixtures_path: Path) -> None: + """Test performance when scanning fixture directories.""" + import time - def test_fixture_cache_performance(self, fixtures_path: Path) -> None: - """Test performance benefits of caching with fixture files.""" - import time + from sqlspec.loader import SQLFileLoader - fixture_file = fixtures_path / "postgres" / "collection-database_details.sql" - if not fixture_file.exists(): - pytest.skip("Large fixture file not available") + test_dirs = [fixtures_path / "postgres", fixtures_path / "mysql"] - loader1 = SQLFileLoader() - start_time = time.time() - loader1.load_sql(fixture_file) - first_load_time = time.time() - start_time + for test_dir in test_dirs: + if not test_dir.exists(): + continue + + loader = SQLFileLoader() start_time = time.time() - loader1.load_sql(fixture_file) - cached_load_time = time.time() - start_time + loader.load_sql(test_dir) + scan_time = time.time() - start_time - assert cached_load_time <= first_load_time, "Cached load should not be slower than first load" + queries = loader.list_queries() + files = loader.list_files() - queries1 = loader1.list_queries() - assert len(queries1) > 0 + assert scan_time < 5.0, f"Directory scanning took too long: {scan_time:.3f}s" + assert len(queries) > 0, f"No queries found in {test_dir}" + assert len(files) > 0, f"No files loaded from {test_dir}" - def test_concurrent_fixture_access_simulation(self, fixtures_path: Path) -> None: - """Test simulated concurrent access to fixture files.""" - import time + if test_dir.name in ["postgres", "mysql"]: + assert len(queries) > 0, f"No queries found in {test_dir}" - fixture_file = fixtures_path / "init.sql" - loaders = [] - load_times = [] +def test_fixture_cache_performance(fixtures_path: Path) -> None: + """Test performance benefits of caching with fixture files.""" + import time - for i 
in range(5): - loader = SQLFileLoader() + from sqlspec.loader import SQLFileLoader - start_time = time.time() - loader.load_sql(fixture_file) - load_time = time.time() - start_time + fixture_file = fixtures_path / "postgres" / "collection-database_details.sql" + if not fixture_file.exists(): + pytest.skip("Large fixture file not available") - loaders.append(loader) - load_times.append(load_time) + loader1 = SQLFileLoader() + start_time = time.time() + loader1.load_sql(fixture_file) + first_load_time = time.time() - start_time - queries = loader.list_queries() - assert len(queries) > 0 + start_time = time.time() + loader1.load_sql(fixture_file) + cached_load_time = time.time() - start_time - assert load_time < 1.0, f"Load {i + 1} took too long: {load_time:.3f}s" + assert cached_load_time <= first_load_time, "Cached load should not be slower than first load" - base_queries = set(loaders[0].list_queries()) - for loader in loaders[1:]: - assert set(loader.list_queries()) == base_queries + queries1 = loader1.list_queries() + assert len(queries1) > 0 - def test_memory_usage_with_large_fixtures(self, fixtures_path: Path) -> None: - """Test memory usage patterns with large fixture files.""" - large_fixtures = ["postgres/collection-database_details.sql", "postgres/collection-table_details.sql"] +def test_concurrent_fixture_access_simulation(fixtures_path: Path) -> None: + """Test simulated concurrent access to fixture files.""" + import time + + from sqlspec.loader import SQLFileLoader + + fixture_file = fixtures_path / "init.sql" + + loaders = [] + load_times = [] + + for i in range(5): loader = SQLFileLoader() - initial_query_count = len(loader.list_queries()) - for fixture_path in large_fixtures: - fixture_file = fixtures_path / fixture_path - if not fixture_file.exists(): - continue + start_time = time.time() + loader.load_sql(fixture_file) + load_time = time.time() - start_time + + loaders.append(loader) + load_times.append(load_time) + + queries = loader.list_queries() + assert len(queries) > 0 + + assert load_time < 1.0, f"Load {i + 1} took too long: {load_time:.3f}s" - loader.load_sql(fixture_file) + base_queries = set(loaders[0].list_queries()) + for loader in loaders[1:]: + assert set(loader.list_queries()) == base_queries - queries = loader.list_queries() - assert len(queries) > initial_query_count +def test_memory_usage_with_large_fixtures(fixtures_path: Path) -> None: + """Test memory usage patterns with large fixture files.""" + from sqlspec.loader import SQLFileLoader + + large_fixtures = ["postgres/collection-database_details.sql", "postgres/collection-table_details.sql"] + + loader = SQLFileLoader() + initial_query_count = len(loader.list_queries()) + + for fixture_path in large_fixtures: + fixture_file = fixtures_path / fixture_path + if not fixture_file.exists(): + continue + + loader.load_sql(fixture_file) + + queries = loader.list_queries() + + assert len(queries) > initial_query_count - for query_name in queries[:5]: - sql_obj = loader.get_sql(query_name) - assert isinstance(sql_obj, SQL) + for query_name in queries[:5]: + sql_obj = loader.get_sql(query_name) + assert isinstance(sql_obj, SQL) - assert len(str(sql_obj)) < 50000 + assert len(str(sql_obj)) < 50000 - initial_query_count = len(queries) + initial_query_count = len(queries) diff --git a/tests/unit/test_loader/test_sql_file_loader.py b/tests/unit/test_loader/test_sql_file_loader.py index ccc44234b..07a9d2357 100644 --- a/tests/unit/test_loader/test_sql_file_loader.py +++ 
b/tests/unit/test_loader/test_sql_file_loader.py @@ -587,7 +587,7 @@ def test_query_name_normalization_edge_cases() -> None: ("with_underscores", "with_underscores"), ("trailing-special!", "trailing_special"), ("multiple-hyphens-here", "multiple_hyphens_here"), - ("mixed-_styles", "mixed__styles"), + ("mixed-_styles", "mixed_styles"), ("ending$", "ending"), ("complex-name$!", "complex_name"), ] @@ -659,7 +659,7 @@ def test_dialect_aliases_parametrized(dialect: str, expected: str) -> None: ("name$", "name"), ("name!", "name"), ("name$!", "name"), - ("complex-name-with$special!", "complex_name_withspecial"), + ("complex-name-with$special!", "complex_name_with_special"), ], ) def test_query_name_normalization_parametrized(name: str, expected: str) -> None: @@ -670,246 +670,277 @@ def test_query_name_normalization_parametrized(name: str, expected: str) -> None assert result == expected -class TestFixtureBasedParsing: - """Test SQL file parsing using real fixture files.""" +@pytest.fixture +def fixture_parsing_path() -> Path: + """Get path to test fixtures directory for parsing tests.""" + return Path(__file__).parent.parent.parent / "fixtures" - @pytest.fixture - def fixtures_path(self) -> Path: - """Get path to test fixtures directory.""" - return Path(__file__).parent.parent.parent / "fixtures" - def test_parse_postgres_database_details_fixture(self, fixtures_path: Path) -> None: - """Test parsing complex PostgreSQL database details fixture.""" - fixture_file = fixtures_path / "postgres" / "collection-database_details.sql" +def test_parse_postgres_database_details_fixture(fixture_parsing_path: Path) -> None: + """Test parsing complex PostgreSQL database details fixture.""" + from sqlspec.loader import NamedStatement, SQLFileLoader - content = fixture_file.read_text(encoding="utf-8") + fixture_file = fixture_parsing_path / "postgres" / "collection-database_details.sql" - statements = SQLFileLoader._parse_sql_content(content, str(fixture_file)) + content = fixture_file.read_text(encoding="utf-8") - expected_queries = [ - "collection_postgres_base_database_details", - "collection_postgres_13_database_details", - "collection_postgres_12_database_details", - ] + statements = SQLFileLoader._parse_sql_content(content, str(fixture_file)) - assert len(statements) == len(expected_queries) - for query_name in expected_queries: - assert query_name in statements - stmt = statements[query_name] - assert isinstance(stmt, NamedStatement) - assert stmt.name == query_name - assert "database_oid" in stmt.sql - assert ":PKEY" in stmt.sql or ":DMA_SOURCE_ID" in stmt.sql + expected_queries = [ + "collection_postgres_base_database_details", + "collection_postgres_13_database_details", + "collection_postgres_12_database_details", + ] - def test_parse_mysql_data_types_fixture(self, fixtures_path: Path) -> None: - """Test parsing MySQL data types fixture.""" - fixture_file = fixtures_path / "mysql" / "collection-data_types.sql" + assert len(statements) == len(expected_queries) + for query_name in expected_queries: + assert query_name in statements + stmt = statements[query_name] + assert isinstance(stmt, NamedStatement) + assert stmt.name == query_name + assert "database_oid" in stmt.sql + assert ":PKEY" in stmt.sql or ":DMA_SOURCE_ID" in stmt.sql - with open(fixture_file, encoding="utf-8") as f: - content = f.read() +def test_parse_mysql_data_types_fixture(fixture_parsing_path: Path) -> None: + """Test parsing MySQL data types fixture.""" + from sqlspec.loader import SQLFileLoader + + fixture_file = 
fixture_parsing_path / "mysql" / "collection-data_types.sql" + + with open(fixture_file, encoding="utf-8") as f: + content = f.read() + + statements = SQLFileLoader._parse_sql_content(content, str(fixture_file)) + + assert len(statements) == 1 + assert "collection_mysql_data_types" in statements + + stmt = statements["collection_mysql_data_types"] + assert "information_schema.columns" in stmt.sql + assert "@PKEY" in stmt.sql or "@DMA_SOURCE_ID" in stmt.sql + + +def test_parse_init_fixture(fixture_parsing_path: Path) -> None: + """Test parsing the init.sql fixture with multiple small queries.""" + from sqlspec.loader import SQLFileLoader + + fixture_file = fixture_parsing_path / "init.sql" + + with open(fixture_file, encoding="utf-8") as f: + content = f.read() + + statements = SQLFileLoader._parse_sql_content(content, str(fixture_file)) + + expected_queries = [ + "readiness_check_init_get_db_count", + "readiness_check_init_get_execution_id", + "readiness_check_init_get_source_id", + ] + + assert len(statements) == len(expected_queries) + for query_name in expected_queries: + assert query_name in statements + + +def test_parse_oracle_ddl_fixture(fixture_parsing_path: Path) -> None: + """Test parsing Oracle DDL fixture for complex SQL structures.""" + from sqlspec.exceptions import SQLFileParseError + from sqlspec.loader import NamedStatement, SQLFileLoader + + fixture_file = fixture_parsing_path / "oracle.ddl.sql" + + if not fixture_file.exists(): + pytest.skip("Oracle DDL fixture not found") + + with open(fixture_file, encoding="utf-8") as f: + content = f.read() + + try: statements = SQLFileLoader._parse_sql_content(content, str(fixture_file)) - assert len(statements) == 1 - assert "collection_mysql_data_types" in statements + for stmt_name, stmt in statements.items(): + assert isinstance(stmt, NamedStatement) + assert stmt.name == stmt_name + assert len(stmt.sql.strip()) > 0 + except SQLFileParseError as e: + assert "No named SQL statements found" in str(e) + - stmt = statements["collection_mysql_data_types"] - assert "information_schema.columns" in stmt.sql - assert "@PKEY" in stmt.sql or "@DMA_SOURCE_ID" in stmt.sql +def test_large_fixture_parsing_performance(fixture_parsing_path: Path) -> None: + """Test parsing performance with large fixture files.""" + from sqlspec.loader import SQLFileLoader - def test_parse_init_fixture(self, fixtures_path: Path) -> None: - """Test parsing the init.sql fixture with multiple small queries.""" - fixture_file = fixtures_path / "init.sql" + large_fixtures = [ + "postgres/collection-database_details.sql", + "postgres/collection-table_details.sql", + "mysql/collection-database_details.sql", + ] + + SQLFileLoader() + + for fixture_path in large_fixtures: + fixture_file = fixture_parsing_path / fixture_path + if not fixture_file.exists(): + continue with open(fixture_file, encoding="utf-8") as f: content = f.read() + start_time = time.time() statements = SQLFileLoader._parse_sql_content(content, str(fixture_file)) + parse_time = time.time() - start_time - expected_queries = [ - "readiness_check_init_get_db_count", - "readiness_check_init_get_execution_id", - "readiness_check_init_get_source_id", - ] + assert parse_time < 0.5, f"Parsing {fixture_path} took too long: {parse_time:.3f}s" + assert len(statements) > 0, f"No statements found in {fixture_path}" - assert len(statements) == len(expected_queries) - for query_name in expected_queries: - assert query_name in statements - def test_parse_oracle_ddl_fixture(self, fixtures_path: Path) -> None: - """Test 
parsing Oracle DDL fixture for complex SQL structures.""" - fixture_file = fixtures_path / "oracle.ddl.sql" +def test_fixture_parameter_style_detection(fixture_parsing_path: Path) -> None: + """Test parameter style detection in fixture files.""" + from sqlspec.loader import SQLFileLoader + test_cases = [ + ("postgres/collection-database_details.sql", ":PKEY"), + ("mysql/collection-data_types.sql", "@PKEY"), + ("init.sql", "pg_control_system"), + ] + + for fixture_path, expected_pattern in test_cases: + fixture_file = fixture_parsing_path / fixture_path if not fixture_file.exists(): - pytest.skip("Oracle DDL fixture not found") + continue with open(fixture_file, encoding="utf-8") as f: content = f.read() - try: - statements = SQLFileLoader._parse_sql_content(content, str(fixture_file)) + statements = SQLFileLoader._parse_sql_content(content, str(fixture_file)) - for stmt_name, stmt in statements.items(): - assert isinstance(stmt, NamedStatement) - assert stmt.name == stmt_name - assert len(stmt.sql.strip()) > 0 - except SQLFileParseError as e: - assert "No named SQL statements found" in str(e) + found_pattern = False + for stmt in statements.values(): + if expected_pattern in stmt.sql: + found_pattern = True + break - def test_large_fixture_parsing_performance(self, fixtures_path: Path) -> None: - """Test parsing performance with large fixture files.""" - large_fixtures = [ - "postgres/collection-database_details.sql", - "postgres/collection-table_details.sql", - "mysql/collection-database_details.sql", - ] + assert found_pattern, f"Pattern '{expected_pattern}' not found in {fixture_path}" - SQLFileLoader() - for fixture_path in large_fixtures: - fixture_file = fixtures_path / fixture_path - if not fixture_file.exists(): - continue +def test_complex_cte_parsing_from_fixtures(fixture_parsing_path: Path) -> None: + """Test parsing complex CTE queries from fixtures.""" + from sqlspec.loader import SQLFileLoader - with open(fixture_file, encoding="utf-8") as f: - content = f.read() + fixture_file = fixture_parsing_path / "postgres" / "collection-database_details.sql" - start_time = time.time() - statements = SQLFileLoader._parse_sql_content(content, str(fixture_file)) - parse_time = time.time() - start_time + with open(fixture_file, encoding="utf-8") as f: + content = f.read() - assert parse_time < 0.5, f"Parsing {fixture_path} took too long: {parse_time:.3f}s" - assert len(statements) > 0, f"No statements found in {fixture_path}" + statements = SQLFileLoader._parse_sql_content(content, str(fixture_file)) - def test_fixture_parameter_style_detection(self, fixtures_path: Path) -> None: - """Test parameter style detection in fixture files.""" - test_cases = [ - ("postgres/collection-database_details.sql", ":PKEY"), - ("mysql/collection-data_types.sql", "@PKEY"), - ("init.sql", "pg_control_system"), - ] + for stmt in statements.values(): + sql = stmt.sql.upper() + if "WITH" in sql: + assert "SELECT" in sql - for fixture_path, expected_pattern in test_cases: - fixture_file = fixtures_path / fixture_path - if not fixture_file.exists(): - continue + assert "JOIN" in sql or "WHERE" in sql or "FROM" in sql - with open(fixture_file, encoding="utf-8") as f: - content = f.read() - statements = SQLFileLoader._parse_sql_content(content, str(fixture_file)) +def test_multi_dialect_fixture_parsing(fixture_parsing_path: Path) -> None: + """Test parsing fixtures from multiple database dialects.""" + from sqlspec.exceptions import SQLFileParseError + from sqlspec.loader import NamedStatement, SQLFileLoader + + 
dialect_fixtures = [ + ("postgres", "collection-extensions.sql"), + ("mysql", "collection-engines.sql"), + ("oracle.ddl.sql", None), + ] - found_pattern = False - for stmt in statements.values(): - if expected_pattern in stmt.sql: - found_pattern = True - break + SQLFileLoader() - assert found_pattern, f"Pattern '{expected_pattern}' not found in {fixture_path}" + for dialect_info in dialect_fixtures: + if len(dialect_info) == 2 and dialect_info[1] is not None: + dialect_dir, filename = dialect_info + assert filename is not None + fixture_file = fixture_parsing_path / dialect_dir / filename + else: + fixture_file = fixture_parsing_path / dialect_info[0] - def test_complex_cte_parsing_from_fixtures(self, fixtures_path: Path) -> None: - """Test parsing complex CTE queries from fixtures.""" - fixture_file = fixtures_path / "postgres" / "collection-database_details.sql" + if not fixture_file.exists(): + continue with open(fixture_file, encoding="utf-8") as f: content = f.read() - statements = SQLFileLoader._parse_sql_content(content, str(fixture_file)) - - for stmt in statements.values(): - sql = stmt.sql.upper() - if "WITH" in sql: - assert "SELECT" in sql - - assert "JOIN" in sql or "WHERE" in sql or "FROM" in sql - - def test_multi_dialect_fixture_parsing(self, fixtures_path: Path) -> None: - """Test parsing fixtures from multiple database dialects.""" - dialect_fixtures = [ - ("postgres", "collection-extensions.sql"), - ("mysql", "collection-engines.sql"), - ("oracle.ddl.sql", None), - ] + try: + statements = SQLFileLoader._parse_sql_content(content, str(fixture_file)) - SQLFileLoader() + for stmt_name, stmt in statements.items(): + assert isinstance(stmt, NamedStatement) + assert len(stmt.sql.strip()) > 0 - for dialect_info in dialect_fixtures: - if len(dialect_info) == 2 and dialect_info[1] is not None: - dialect_dir, filename = dialect_info - assert filename is not None # Type guard for mypy - fixture_file = fixtures_path / dialect_dir / filename - else: - fixture_file = fixtures_path / dialect_info[0] + assert stmt.name == stmt_name - if not fixture_file.exists(): - continue + except SQLFileParseError: + pass - with open(fixture_file, encoding="utf-8") as f: - content = f.read() - try: - statements = SQLFileLoader._parse_sql_content(content, str(fixture_file)) +@pytest.fixture +def fixture_integration_path() -> Path: + """Get path to test fixtures directory for integration tests.""" + return Path(__file__).parent.parent.parent / "fixtures" - for stmt_name, stmt in statements.items(): - assert isinstance(stmt, NamedStatement) - assert len(stmt.sql.strip()) > 0 - assert stmt.name == stmt_name +def test_load_and_execute_fixture_queries(fixture_integration_path: Path) -> None: + """Test loading and creating SQL objects from fixture queries.""" + from sqlspec.core.statement import SQL + from sqlspec.loader import SQLFileLoader - except SQLFileParseError: - pass + fixture_file = fixture_integration_path / "init.sql" + loader = SQLFileLoader() + loader.load_sql(fixture_file) -class TestFixtureBasedIntegration: - """Test loader integration using fixture files.""" + queries = loader.list_queries() + assert len(queries) >= 3 - @pytest.fixture - def fixtures_path(self) -> Path: - """Get path to test fixtures directory.""" - return Path(__file__).parent.parent.parent / "fixtures" + for query_name in queries: + sql = loader.get_sql(query_name) + assert isinstance(sql, SQL) + assert len(sql.sql.strip()) > 0 - def test_load_and_execute_fixture_queries(self, fixtures_path: Path) -> None: - """Test 
loading and creating SQL objects from fixture queries.""" - fixture_file = fixtures_path / "init.sql" - loader = SQLFileLoader() - loader.load_sql(fixture_file) +def test_fixture_query_metadata_preservation(fixture_integration_path: Path) -> None: + """Test that fixture query metadata is preserved.""" + from sqlspec.loader import SQLFileLoader - queries = loader.list_queries() - assert len(queries) >= 3 + fixture_file = fixture_integration_path / "postgres" / "collection-database_details.sql" - for query_name in queries: - sql = loader.get_sql(query_name) - assert isinstance(sql, SQL) - assert len(sql.sql.strip()) > 0 + loader = SQLFileLoader() + loader.load_sql(fixture_file) - def test_fixture_query_metadata_preservation(self, fixtures_path: Path) -> None: - """Test that fixture query metadata is preserved.""" - fixture_file = fixtures_path / "postgres" / "collection-database_details.sql" + files = loader.list_files() + assert str(fixture_file) in files - loader = SQLFileLoader() - loader.load_sql(fixture_file) + queries = loader.list_queries() + for query_name in queries: + file_info = loader.get_file_for_query(query_name) + assert file_info is not None + assert fixture_file.name in file_info.path - files = loader.list_files() - assert str(fixture_file) in files - queries = loader.list_queries() - for query_name in queries: - file_info = loader.get_file_for_query(query_name) - assert file_info is not None - assert fixture_file.name in file_info.path +def test_fixture_parameter_extraction(fixture_integration_path: Path) -> None: + """Test parameter extraction from fixture queries.""" + from sqlspec.core.statement import SQL + from sqlspec.loader import SQLFileLoader - def test_fixture_parameter_extraction(self, fixtures_path: Path) -> None: - """Test parameter extraction from fixture queries.""" - fixture_file = fixtures_path / "postgres" / "collection-database_details.sql" + fixture_file = fixture_integration_path / "postgres" / "collection-database_details.sql" - loader = SQLFileLoader() - loader.load_sql(fixture_file) + loader = SQLFileLoader() + loader.load_sql(fixture_file) - queries = loader.list_queries() - test_query = queries[0] + queries = loader.list_queries() + test_query = queries[0] - sql = loader.get_sql(test_query) - assert isinstance(sql, SQL) + sql = loader.get_sql(test_query) + assert isinstance(sql, SQL) - assert sql.parameters == [] + assert sql.parameters == [] diff --git a/tests/unit/test_storage/__init__.py b/tests/unit/test_storage/__init__.py new file mode 100644 index 000000000..38838a53c --- /dev/null +++ b/tests/unit/test_storage/__init__.py @@ -0,0 +1 @@ +"""Unit tests for storage backends.""" diff --git a/tests/unit/test_storage/test_fsspec_backend.py b/tests/unit/test_storage/test_fsspec_backend.py new file mode 100644 index 000000000..35a083a0e --- /dev/null +++ b/tests/unit/test_storage/test_fsspec_backend.py @@ -0,0 +1,499 @@ +"""Unit tests for FSSpecBackend.""" + +import tempfile +from pathlib import Path + +import pytest + +from sqlspec.exceptions import MissingDependencyError +from sqlspec.typing import FSSPEC_INSTALLED, PYARROW_INSTALLED + +if FSSPEC_INSTALLED: + from sqlspec.storage.backends.fsspec import FSSpecBackend + + +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +def test_init_with_filesystem_string() -> None: + """Test initialization with filesystem string.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + store = FSSpecBackend("file") + assert store.protocol == "file" + + +@pytest.mark.skipif(not 
FSSPEC_INSTALLED, reason="fsspec not installed") +def test_init_with_uri() -> None: + """Test initialization with URI.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + store = FSSpecBackend("file:///tmp") + assert store.protocol == "file" + + +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +def test_from_config() -> None: + """Test from_config class method.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + config = {"protocol": "file", "base_path": "/tmp/test", "fs_config": {}} + store = FSSpecBackend.from_config(config) + assert store.protocol == "file" + assert store.base_path == "/tmp/test" + + +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +def test_write_and_read_bytes() -> None: + """Test write and read bytes operations.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + test_data = b"test data content" + + store.write_bytes("test_file.bin", test_data) + result = store.read_bytes("test_file.bin") + + assert result == test_data + + +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +def test_write_and_read_text() -> None: + """Test write and read text operations.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + test_text = "test text content\nwith multiple lines" + + store.write_text("test_file.txt", test_text) + result = store.read_text("test_file.txt") + + assert result == test_text + + +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +def test_exists() -> None: + """Test exists operation.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + + assert not store.exists("nonexistent.txt") + + store.write_text("existing.txt", "content") + assert store.exists("existing.txt") + + +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +def test_delete() -> None: + """Test delete operation.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + + store.write_text("to_delete.txt", "content") + assert store.exists("to_delete.txt") + + store.delete("to_delete.txt") + assert not store.exists("to_delete.txt") + + +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +def test_copy() -> None: + """Test copy operation.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + original_content = "original content" + + store.write_text("original.txt", original_content) + store.copy("original.txt", "copied.txt") + + assert store.exists("copied.txt") + assert store.read_text("copied.txt") == original_content + + +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +def test_move() -> None: + """Test move operation.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + original_content = "content to move" + + store.write_text("original.txt", original_content) + store.move("original.txt", "moved.txt") + + assert not 
store.exists("original.txt") + assert store.exists("moved.txt") + assert store.read_text("moved.txt") == original_content + + +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +def test_list_objects() -> None: + """Test list_objects operation.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + + # Create test files + store.write_text("file1.txt", "content1") + store.write_text("file2.txt", "content2") + store.write_text("subdir/file3.txt", "content3") + + # List all objects + all_objects = store.list_objects() + assert any("file1.txt" in obj for obj in all_objects) + assert any("file2.txt" in obj for obj in all_objects) + assert any("file3.txt" in obj for obj in all_objects) + + +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +def test_glob() -> None: + """Test glob pattern matching.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + + # Create test files + store.write_text("test1.sql", "SELECT 1") + store.write_text("test2.sql", "SELECT 2") + store.write_text("config.json", "{}") + + # Test glob patterns + sql_files = store.glob("*.sql") + assert any("test1.sql" in obj for obj in sql_files) + assert any("test2.sql" in obj for obj in sql_files) + assert not any("config.json" in obj for obj in sql_files) + + +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +def test_get_metadata() -> None: + """Test get_metadata operation.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + test_content = "test content for metadata" + + store.write_text("test_file.txt", test_content) + metadata = store.get_metadata("test_file.txt") + + assert "size" in metadata + assert "exists" in metadata + assert metadata["exists"] is True + + +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +def test_is_object_and_is_path() -> None: + """Test is_object and is_path operations.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + + store.write_text("file.txt", "content") + Path(temp_dir, "subdir").mkdir() + + assert store.is_object("file.txt") + assert not store.is_object("subdir") + assert not store.is_path("file.txt") + assert store.is_path("subdir") + + +@pytest.mark.skipif(not FSSPEC_INSTALLED or not PYARROW_INSTALLED, reason="fsspec or PyArrow not installed") +def test_write_and_read_arrow() -> None: + """Test write and read Arrow table operations.""" + import pyarrow as pa + + from sqlspec.storage.backends.fsspec import FSSpecBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + + # Create test Arrow table + data = {"id": [1, 2, 3], "name": ["Alice", "Bob", "Charlie"], "score": [95.5, 87.0, 92.3]} + table = pa.table(data) + + store.write_arrow("test_data.parquet", table) + result = store.read_arrow("test_data.parquet") + + assert result.equals(table) + + +@pytest.mark.skipif(not FSSPEC_INSTALLED or not PYARROW_INSTALLED, reason="fsspec or PyArrow not installed") +def test_stream_arrow() -> None: + """Test stream Arrow record batches.""" + import pyarrow as pa + + from sqlspec.storage.backends.fsspec 
import FSSpecBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + + # Create test Arrow table + data = {"id": [1, 2, 3, 4, 5], "value": ["a", "b", "c", "d", "e"]} + table = pa.table(data) + + store.write_arrow("stream_test.parquet", table) + + # Stream record batches + batches = list(store.stream_arrow("stream_test.parquet")) + assert len(batches) > 0 + + # Verify we can read the data + reconstructed = pa.Table.from_batches(batches) + assert reconstructed.equals(table) + + +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +def test_sign_returns_uri() -> None: + """Test sign returns URI for files.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + + store.write_text("test.txt", "content") + signed_url = store.sign("test.txt") + + assert "test.txt" in signed_url + + +def test_fsspec_not_installed() -> None: + """Test error when fsspec is not installed.""" + if FSSPEC_INSTALLED: + pytest.skip("fsspec is installed") + + with pytest.raises(MissingDependencyError, match="fsspec"): + from sqlspec.storage.backends.fsspec import FSSpecBackend + + FSSpecBackend("file") + + +# Async tests + + +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +async def test_async_write_and_read_bytes() -> None: + """Test async write and read bytes operations.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + test_data = b"async test data content" + + await store.write_bytes_async("async_test_file.bin", test_data) + result = await store.read_bytes_async("async_test_file.bin") + + assert result == test_data + + +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +async def test_async_write_and_read_text() -> None: + """Test async write and read text operations.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + test_text = "async test text content\nwith multiple lines" + + await store.write_text_async("async_test_file.txt", test_text) + result = await store.read_text_async("async_test_file.txt") + + assert result == test_text + + +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +async def test_async_exists() -> None: + """Test async exists operation.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + + assert not await store.exists_async("async_nonexistent.txt") + + await store.write_text_async("async_existing.txt", "content") + assert await store.exists_async("async_existing.txt") + + +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +async def test_async_delete() -> None: + """Test async delete operation.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + + await store.write_text_async("async_to_delete.txt", "content") + assert await store.exists_async("async_to_delete.txt") + + await store.delete_async("async_to_delete.txt") + assert not await store.exists_async("async_to_delete.txt") + + +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not 
installed") +async def test_async_copy() -> None: + """Test async copy operation.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + original_content = "async original content" + + await store.write_text_async("async_original.txt", original_content) + await store.copy_async("async_original.txt", "async_copied.txt") + + assert await store.exists_async("async_copied.txt") + assert await store.read_text_async("async_copied.txt") == original_content + + +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +async def test_async_move() -> None: + """Test async move operation.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + original_content = "async content to move" + + await store.write_text_async("async_original.txt", original_content) + await store.move_async("async_original.txt", "async_moved.txt") + + assert not await store.exists_async("async_original.txt") + assert await store.exists_async("async_moved.txt") + assert await store.read_text_async("async_moved.txt") == original_content + + +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +async def test_async_list_objects() -> None: + """Test async list_objects operation.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + + # Create test files + await store.write_text_async("async_file1.txt", "content1") + await store.write_text_async("async_file2.txt", "content2") + await store.write_text_async("async_subdir/file3.txt", "content3") + + # List all objects + all_objects = await store.list_objects_async() + assert any("file1.txt" in obj for obj in all_objects) + assert any("file2.txt" in obj for obj in all_objects) + assert any("file3.txt" in obj for obj in all_objects) + + +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +async def test_async_get_metadata() -> None: + """Test async get_metadata operation.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + test_content = "async test content for metadata" + + await store.write_text_async("async_test_file.txt", test_content) + metadata = await store.get_metadata_async("async_test_file.txt") + + assert "size" in metadata + assert "exists" in metadata + assert metadata["exists"] is True + + +@pytest.mark.skipif(not FSSPEC_INSTALLED or not PYARROW_INSTALLED, reason="fsspec or PyArrow not installed") +async def test_async_write_and_read_arrow() -> None: + """Test async write and read Arrow table operations.""" + import pyarrow as pa + + from sqlspec.storage.backends.fsspec import FSSpecBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + + # Create test Arrow table + data = {"id": [1, 2, 3, 4], "name": ["Alice", "Bob", "Charlie", "David"], "score": [95.5, 87.0, 92.3, 89.7]} + table = pa.table(data) + + await store.write_arrow_async("async_test_data.parquet", table) + result = await store.read_arrow_async("async_test_data.parquet") + + assert result.equals(table) + + +@pytest.mark.skipif(not FSSPEC_INSTALLED or not PYARROW_INSTALLED, reason="fsspec or PyArrow not installed") +async def 
test_async_stream_arrow() -> None: + """Test async stream Arrow record batches.""" + import pyarrow as pa + + from sqlspec.storage.backends.fsspec import FSSpecBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + + # Create test Arrow table + data = {"id": [1, 2, 3, 4, 5, 6], "value": ["a", "b", "c", "d", "e", "f"]} + table = pa.table(data) + + await store.write_arrow_async("async_stream_test.parquet", table) + + # Stream record batches + batches = [batch async for batch in store.stream_arrow_async("async_stream_test.parquet")] + + assert len(batches) > 0 + + # Verify we can read the data + reconstructed = pa.Table.from_batches(batches) + assert reconstructed.equals(table) + + +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +async def test_async_sign() -> None: + """Test async sign returns URI for local files.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + + await store.write_text_async("async_test.txt", "content") + signed_url = await store.sign_async("async_test.txt") + + assert "async_test.txt" in signed_url + + +def test_fsspec_operations_without_fsspec() -> None: + """Test operations raise proper error without fsspec.""" + if FSSPEC_INSTALLED: + pytest.skip("fsspec is installed") + + with pytest.raises(MissingDependencyError, match="fsspec"): + FSSpecBackend("file") # type: ignore + + +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +def test_arrow_operations_without_pyarrow() -> None: + """Test Arrow operations raise proper error without PyArrow.""" + from sqlspec.storage.backends.fsspec import FSSpecBackend + + if PYARROW_INSTALLED: + pytest.skip("PyArrow is installed") + + with tempfile.TemporaryDirectory() as temp_dir: + store = FSSpecBackend("file", base_path=temp_dir) + + with pytest.raises(MissingDependencyError, match="pyarrow"): + store.read_arrow("test.parquet") + + with pytest.raises(MissingDependencyError, match="pyarrow"): + store.write_arrow("test.parquet", None) # type: ignore + + with pytest.raises(MissingDependencyError, match="pyarrow"): + list(store.stream_arrow("*.parquet")) diff --git a/tests/unit/test_storage/test_local_store.py b/tests/unit/test_storage/test_local_store.py new file mode 100644 index 000000000..56e6bd93f --- /dev/null +++ b/tests/unit/test_storage/test_local_store.py @@ -0,0 +1,484 @@ +"""Unit tests for LocalStore backend.""" + +import tempfile +from pathlib import Path + +import pyarrow as pa +import pytest + +from sqlspec.exceptions import MissingDependencyError +from sqlspec.storage.backends.local import LocalStore +from sqlspec.typing import PYARROW_INSTALLED + + +def test_init_with_file_uri() -> None: + """Test initialization with file:// URI.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(f"file://{temp_dir}") + assert store.base_path == Path(temp_dir).resolve() + + +def test_init_with_path_string() -> None: + """Test initialization with plain path string.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + assert store.base_path == Path(temp_dir).resolve() + + +def test_init_empty_defaults_to_cwd() -> None: + """Test initialization with empty string defaults to current directory.""" + store = LocalStore("") + assert store.base_path == Path.cwd() + + +def test_write_and_read_bytes() -> None: + """Test write and read bytes operations.""" + with 
tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + test_data = b"test data content" + + store.write_bytes("test_file.bin", test_data) + result = store.read_bytes("test_file.bin") + + assert result == test_data + + +def test_write_and_read_text() -> None: + """Test write and read text operations.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + test_text = "test text content\nwith multiple lines" + + store.write_text("test_file.txt", test_text) + result = store.read_text("test_file.txt") + + assert result == test_text + + +def test_write_and_read_text_custom_encoding() -> None: + """Test write and read text with custom encoding.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + test_text = "test with ünicode" + + store.write_text("test_file.txt", test_text, encoding="latin-1") + result = store.read_text("test_file.txt", encoding="latin-1") + + assert result == test_text + + +def test_exists() -> None: + """Test exists operation.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + + assert not store.exists("nonexistent.txt") + + store.write_text("existing.txt", "content") + assert store.exists("existing.txt") + + +def test_delete() -> None: + """Test delete operation.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + + store.write_text("to_delete.txt", "content") + assert store.exists("to_delete.txt") + + store.delete("to_delete.txt") + assert not store.exists("to_delete.txt") + + +def test_copy() -> None: + """Test copy operation.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + original_content = "original content" + + store.write_text("original.txt", original_content) + store.copy("original.txt", "copied.txt") + + assert store.exists("copied.txt") + assert store.read_text("copied.txt") == original_content + + +def test_move() -> None: + """Test move operation.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + original_content = "content to move" + + store.write_text("original.txt", original_content) + store.move("original.txt", "moved.txt") + + assert not store.exists("original.txt") + assert store.exists("moved.txt") + assert store.read_text("moved.txt") == original_content + + +def test_list_objects() -> None: + """Test list_objects operation.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + + # Create test files + store.write_text("file1.txt", "content1") + store.write_text("file2.txt", "content2") + store.write_text("subdir/file3.txt", "content3") + + # List all objects + all_objects = store.list_objects() + assert "file1.txt" in all_objects + assert "file2.txt" in all_objects + assert "subdir/file3.txt" in all_objects + + +def test_list_objects_with_prefix() -> None: + """Test list_objects with prefix filtering.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + + # Create test files + store.write_text("prefix_file1.txt", "content1") + store.write_text("prefix_file2.txt", "content2") + store.write_text("other_file.txt", "content3") + + # List with prefix + prefixed_objects = store.list_objects(prefix="prefix_") + assert "prefix_file1.txt" in prefixed_objects + assert "prefix_file2.txt" in prefixed_objects + assert "other_file.txt" not in prefixed_objects + + +def test_glob() -> None: + """Test glob pattern matching.""" + with tempfile.TemporaryDirectory() as temp_dir: + store 
= LocalStore(temp_dir) + + # Create test files + store.write_text("test1.sql", "SELECT 1") + store.write_text("test2.sql", "SELECT 2") + store.write_text("config.json", "{}") + store.write_text("subdir/test3.sql", "SELECT 3") + + # Test glob patterns + sql_files = store.glob("*.sql") + assert "test1.sql" in sql_files + assert "test2.sql" in sql_files + assert "config.json" not in sql_files + + +def test_get_metadata() -> None: + """Test get_metadata operation.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + test_content = "test content for metadata" + + store.write_text("test_file.txt", test_content) + metadata = store.get_metadata("test_file.txt") + + assert "size" in metadata + assert "modified" in metadata + assert metadata["size"] == len(test_content.encode()) + + +def test_is_object_and_is_path() -> None: + """Test is_object and is_path operations.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + + store.write_text("file.txt", "content") + (Path(temp_dir) / "subdir").mkdir() + + assert store.is_object("file.txt") + assert not store.is_object("subdir") + assert not store.is_path("file.txt") + assert store.is_path("subdir") + + +@pytest.mark.skipif(not PYARROW_INSTALLED, reason="PyArrow not installed") +def test_write_and_read_arrow() -> None: + """Test write and read Arrow table operations.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + + # Create test Arrow table + data = {"id": [1, 2, 3], "name": ["Alice", "Bob", "Charlie"], "score": [95.5, 87.0, 92.3]} + table = pa.table(data) + + store.write_arrow("test_data.parquet", table) + result = store.read_arrow("test_data.parquet") + + assert result.equals(table) + + +@pytest.mark.skipif(not PYARROW_INSTALLED, reason="PyArrow not installed") +def test_stream_arrow() -> None: + """Test stream Arrow record batches.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + + # Create test Arrow table + data = {"id": [1, 2, 3, 4, 5], "value": ["a", "b", "c", "d", "e"]} + table = pa.table(data) + + store.write_arrow("stream_test.parquet", table) + + # Stream record batches + batches = list(store.stream_arrow("stream_test.parquet")) + assert len(batches) > 0 + + # Verify we can read the data + reconstructed = pa.Table.from_batches(batches) + assert reconstructed.equals(table) + + +def test_sign_returns_file_uri() -> None: + """Test sign returns file:// URI for local files.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + + store.write_text("test.txt", "content") + signed_url = store.sign("test.txt") + + assert signed_url.startswith("file://") + assert "test.txt" in signed_url + + +def test_sign_with_options() -> None: + """Test sign with expires_in and for_upload options.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + + store.write_text("test.txt", "content") + + # Options are ignored for local files but should not error + signed_url = store.sign("test.txt", expires_in=7200, for_upload=True) + assert signed_url.startswith("file://") + + +def test_resolve_path_absolute() -> None: + """Test path resolution with absolute paths.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + + # Absolute path should be returned as-is + test_path = Path(temp_dir) / "test.txt" + store.write_text("test.txt", "content") + + resolved = store._resolve_path(str(test_path)) + assert resolved == test_path + + +def 
test_resolve_path_relative() -> None: + """Test path resolution with relative paths.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + + resolved = store._resolve_path("subdir/file.txt") + expected = Path(temp_dir).resolve() / "subdir" / "file.txt" + assert resolved == expected + + +def test_nested_directory_operations() -> None: + """Test operations with nested directories.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + + # Write to nested path + store.write_text("level1/level2/file.txt", "nested content") + assert store.exists("level1/level2/file.txt") + assert store.read_text("level1/level2/file.txt") == "nested content" + + # List should include nested files + objects = store.list_objects() + assert "level1/level2/file.txt" in objects + + +def test_file_not_found_errors() -> None: + """Test operations on non-existent files raise appropriate errors.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + + with pytest.raises(FileNotFoundError): + store.read_bytes("nonexistent.bin") + + with pytest.raises(FileNotFoundError): + store.read_text("nonexistent.txt") + + +# Async tests + + +async def test_async_write_and_read_bytes() -> None: + """Test async write and read bytes operations.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + test_data = b"async test data content" + + await store.write_bytes_async("async_test_file.bin", test_data) + result = await store.read_bytes_async("async_test_file.bin") + + assert result == test_data + + +async def test_async_write_and_read_text() -> None: + """Test async write and read text operations.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + test_text = "async test text content\nwith multiple lines" + + await store.write_text_async("async_test_file.txt", test_text) + result = await store.read_text_async("async_test_file.txt") + + assert result == test_text + + +async def test_async_exists() -> None: + """Test async exists operation.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + + assert not await store.exists_async("async_nonexistent.txt") + + await store.write_text_async("async_existing.txt", "content") + assert await store.exists_async("async_existing.txt") + + +async def test_async_delete() -> None: + """Test async delete operation.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + + await store.write_text_async("async_to_delete.txt", "content") + assert await store.exists_async("async_to_delete.txt") + + await store.delete_async("async_to_delete.txt") + assert not await store.exists_async("async_to_delete.txt") + + +async def test_async_copy() -> None: + """Test async copy operation.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + original_content = "async original content" + + await store.write_text_async("async_original.txt", original_content) + await store.copy_async("async_original.txt", "async_copied.txt") + + assert await store.exists_async("async_copied.txt") + assert await store.read_text_async("async_copied.txt") == original_content + + +async def test_async_move() -> None: + """Test async move operation.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + original_content = "async content to move" + + await store.write_text_async("async_original.txt", original_content) + await 
store.move_async("async_original.txt", "async_moved.txt") + + assert not await store.exists_async("async_original.txt") + assert await store.exists_async("async_moved.txt") + assert await store.read_text_async("async_moved.txt") == original_content + + +async def test_async_list_objects() -> None: + """Test async list_objects operation.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + + # Create test files + await store.write_text_async("async_file1.txt", "content1") + await store.write_text_async("async_file2.txt", "content2") + await store.write_text_async("async_subdir/file3.txt", "content3") + + # List all objects + all_objects = await store.list_objects_async() + assert "async_file1.txt" in all_objects + assert "async_file2.txt" in all_objects + assert "async_subdir/file3.txt" in all_objects + + +async def test_async_get_metadata() -> None: + """Test async get_metadata operation.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + test_content = "async test content for metadata" + + await store.write_text_async("async_test_file.txt", test_content) + metadata = await store.get_metadata_async("async_test_file.txt") + + assert "size" in metadata + assert "modified" in metadata + assert metadata["size"] == len(test_content.encode()) + + +@pytest.mark.skipif(not PYARROW_INSTALLED, reason="PyArrow not installed") +async def test_async_write_and_read_arrow() -> None: + """Test async write and read Arrow table operations.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + + # Create test Arrow table + data = {"id": [1, 2, 3, 4], "name": ["Alice", "Bob", "Charlie", "David"], "score": [95.5, 87.0, 92.3, 89.7]} + table = pa.table(data) + + await store.write_arrow_async("async_test_data.parquet", table) + result = await store.read_arrow_async("async_test_data.parquet") + + assert result.equals(table) + + +@pytest.mark.skipif(not PYARROW_INSTALLED, reason="PyArrow not installed") +async def test_async_stream_arrow() -> None: + """Test async stream Arrow record batches.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + + # Create test Arrow table + data = {"id": [1, 2, 3, 4, 5, 6], "value": ["a", "b", "c", "d", "e", "f"]} + table = pa.table(data) + + await store.write_arrow_async("async_stream_test.parquet", table) + + # Stream record batches + batches = [batch async for batch in store.stream_arrow_async("async_stream_test.parquet")] + + assert len(batches) > 0 + + # Verify we can read the data + reconstructed = pa.Table.from_batches(batches) + assert reconstructed.equals(table) + + +async def test_async_sign() -> None: + """Test async sign returns file:// URI for local files.""" + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + + await store.write_text_async("async_test.txt", "content") + signed_url = await store.sign_async("async_test.txt") + + assert signed_url.startswith("file://") + assert "async_test.txt" in signed_url + + +def test_arrow_operations_without_pyarrow() -> None: + """Test Arrow operations raise proper error without PyArrow.""" + if PYARROW_INSTALLED: + pytest.skip("PyArrow is installed") + + with tempfile.TemporaryDirectory() as temp_dir: + store = LocalStore(temp_dir) + + with pytest.raises(MissingDependencyError, match="pyarrow"): + store.read_arrow("test.parquet") + + with pytest.raises(MissingDependencyError, match="pyarrow"): + store.write_arrow("test.parquet", None) # type: ignore + + with 
pytest.raises(MissingDependencyError, match="pyarrow"): + list(store.stream_arrow("*.parquet")) diff --git a/tests/unit/test_storage/test_obstore_backend.py b/tests/unit/test_storage/test_obstore_backend.py new file mode 100644 index 000000000..b1fb4b430 --- /dev/null +++ b/tests/unit/test_storage/test_obstore_backend.py @@ -0,0 +1,488 @@ +"""Unit tests for ObStoreBackend.""" + +import tempfile + +import pytest + +from sqlspec.exceptions import MissingDependencyError +from sqlspec.typing import OBSTORE_INSTALLED, PYARROW_INSTALLED + +if OBSTORE_INSTALLED: + from sqlspec.storage.backends.obstore import ObStoreBackend + + +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +def test_init_with_file_uri() -> None: + """Test initialization with file:// URI.""" + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + assert store.base_path == "" + + +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +def test_from_config() -> None: + """Test from_config class method.""" + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + data_dir = f"{temp_dir}/data" + config = {"store_uri": f"file://{data_dir}", "store_options": {}} + store = ObStoreBackend.from_config(config) + assert store.base_path == "" + + +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +def test_write_and_read_bytes() -> None: + """Test write and read bytes operations.""" + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + test_data = b"test data content" + + store.write_bytes("test_file.bin", test_data) + result = store.read_bytes("test_file.bin") + + assert result == test_data + + +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +def test_write_and_read_text() -> None: + """Test write and read text operations.""" + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + test_text = "test text content\nwith multiple lines" + + store.write_text("test_file.txt", test_text) + result = store.read_text("test_file.txt") + + assert result == test_text + + +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +def test_exists() -> None: + """Test exists operation.""" + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + + assert not store.exists("nonexistent.txt") + + store.write_text("existing.txt", "content") + assert store.exists("existing.txt") + + +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +def test_delete() -> None: + """Test delete operation.""" + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + + store.write_text("to_delete.txt", "content") + assert store.exists("to_delete.txt") + + store.delete("to_delete.txt") + assert not store.exists("to_delete.txt") + + +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +def test_copy() -> None: + """Test copy operation.""" + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() 
as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + original_content = "original content" + + store.write_text("original.txt", original_content) + store.copy("original.txt", "copied.txt") + + assert store.exists("copied.txt") + assert store.read_text("copied.txt") == original_content + + +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +def test_move() -> None: + """Test move operation.""" + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + original_content = "content to move" + + store.write_text("original.txt", original_content) + store.move("original.txt", "moved.txt") + + assert not store.exists("original.txt") + assert store.exists("moved.txt") + assert store.read_text("moved.txt") == original_content + + +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +def test_list_objects() -> None: + """Test list_objects operation.""" + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + + # Create test files + store.write_text("file1.txt", "content1") + store.write_text("file2.txt", "content2") + store.write_text("subdir/file3.txt", "content3") + + # List all objects + all_objects = store.list_objects() + assert any("file1.txt" in obj for obj in all_objects) + assert any("file2.txt" in obj for obj in all_objects) + assert any("file3.txt" in obj for obj in all_objects) + + +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +def test_glob() -> None: + """Test glob pattern matching.""" + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + + # Create test files + store.write_text("test1.sql", "SELECT 1") + store.write_text("test2.sql", "SELECT 2") + store.write_text("config.json", "{}") + + # Test glob patterns + sql_files = store.glob("*.sql") + assert any("test1.sql" in obj for obj in sql_files) + assert any("test2.sql" in obj for obj in sql_files) + assert not any("config.json" in obj for obj in sql_files) + + +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +def test_get_metadata() -> None: + """Test get_metadata operation.""" + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + test_content = "test content for metadata" + + store.write_text("test_file.txt", test_content) + metadata = store.get_metadata("test_file.txt") + + assert "exists" in metadata + assert metadata["exists"] is True + + +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +def test_is_object_and_is_path() -> None: + """Test is_object and is_path operations.""" + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + + store.write_text("file.txt", "content") + # Create directory by writing file inside it + store.write_text("subdir/nested.txt", "content") + + assert store.is_object("file.txt") + assert not store.is_object("subdir") + assert not store.is_path("file.txt") + assert store.is_path("subdir") + + +@pytest.mark.skipif(not OBSTORE_INSTALLED or not PYARROW_INSTALLED, reason="obstore or PyArrow not installed") +def test_write_and_read_arrow() -> 
None: + """Test write and read Arrow table operations.""" + import pyarrow as pa + + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + + # Create test Arrow table + data = {"id": [1, 2, 3], "name": ["Alice", "Bob", "Charlie"], "score": [95.5, 87.0, 92.3]} + table = pa.table(data) + + store.write_arrow("test_data.parquet", table) + result = store.read_arrow("test_data.parquet") + + assert result.equals(table) + + +@pytest.mark.skipif(not OBSTORE_INSTALLED or not PYARROW_INSTALLED, reason="obstore or PyArrow not installed") +def test_stream_arrow() -> None: + """Test stream Arrow record batches.""" + import pyarrow as pa + + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + + # Create test Arrow table + data = {"id": [1, 2, 3, 4, 5], "value": ["a", "b", "c", "d", "e"]} + table = pa.table(data) + + store.write_arrow("stream_test.parquet", table) + + # Stream record batches + batches = list(store.stream_arrow("stream_test.parquet")) + assert len(batches) > 0 + + # Verify we can read the data + reconstructed = pa.Table.from_batches(batches) + assert reconstructed.equals(table) + + +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +def test_sign_returns_uri() -> None: + """Test sign returns URI for files.""" + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + + store.write_text("test.txt", "content") + signed_url = store.sign("test.txt") + + assert "test.txt" in signed_url + + +def test_obstore_not_installed() -> None: + """Test error when obstore is not installed.""" + if OBSTORE_INSTALLED: + pytest.skip("obstore is installed") + + with pytest.raises(MissingDependencyError, match="obstore"): + ObStoreBackend("file:///tmp") # type: ignore + + +# Async tests + + +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +async def test_async_write_and_read_bytes() -> None: + """Test async write and read bytes operations.""" + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + test_data = b"async test data content" + + await store.write_bytes_async("async_test_file.bin", test_data) + result = await store.read_bytes_async("async_test_file.bin") + + assert result == test_data + + +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +async def test_async_write_and_read_text() -> None: + """Test async write and read text operations.""" + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + test_text = "async test text content\nwith multiple lines" + + await store.write_text_async("async_test_file.txt", test_text) + result = await store.read_text_async("async_test_file.txt") + + assert result == test_text + + +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +async def test_async_exists() -> None: + """Test async exists operation.""" + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + + assert not await store.exists_async("async_nonexistent.txt") + + await 
store.write_text_async("async_existing.txt", "content") + assert await store.exists_async("async_existing.txt") + + +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +async def test_async_delete() -> None: + """Test async delete operation.""" + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + + await store.write_text_async("async_to_delete.txt", "content") + assert await store.exists_async("async_to_delete.txt") + + await store.delete_async("async_to_delete.txt") + assert not await store.exists_async("async_to_delete.txt") + + +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +async def test_async_copy() -> None: + """Test async copy operation.""" + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + original_content = "async original content" + + await store.write_text_async("async_original.txt", original_content) + await store.copy_async("async_original.txt", "async_copied.txt") + + assert await store.exists_async("async_copied.txt") + assert await store.read_text_async("async_copied.txt") == original_content + + +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +async def test_async_move() -> None: + """Test async move operation.""" + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + original_content = "async content to move" + + await store.write_text_async("async_original.txt", original_content) + await store.move_async("async_original.txt", "async_moved.txt") + + assert not await store.exists_async("async_original.txt") + assert await store.exists_async("async_moved.txt") + assert await store.read_text_async("async_moved.txt") == original_content + + +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +async def test_async_list_objects() -> None: + """Test async list_objects operation.""" + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + + # Create test files + await store.write_text_async("async_file1.txt", "content1") + await store.write_text_async("async_file2.txt", "content2") + await store.write_text_async("async_subdir/file3.txt", "content3") + + # List all objects + all_objects = await store.list_objects_async() + assert any("file1.txt" in obj for obj in all_objects) + assert any("file2.txt" in obj for obj in all_objects) + assert any("file3.txt" in obj for obj in all_objects) + + +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +async def test_async_get_metadata() -> None: + """Test async get_metadata operation.""" + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + test_content = "async test content for metadata" + + await store.write_text_async("async_test_file.txt", test_content) + metadata = await store.get_metadata_async("async_test_file.txt") + + assert "exists" in metadata + assert metadata["exists"] is True + + +@pytest.mark.skipif(not OBSTORE_INSTALLED or not PYARROW_INSTALLED, reason="obstore or PyArrow not installed") +async def test_async_write_and_read_arrow() -> None: + """Test async write 
and read Arrow table operations.""" + import pyarrow as pa + + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + + # Create test Arrow table + data = {"id": [1, 2, 3, 4], "name": ["Alice", "Bob", "Charlie", "David"], "score": [95.5, 87.0, 92.3, 89.7]} + table = pa.table(data) + + await store.write_arrow_async("async_test_data.parquet", table) + result = await store.read_arrow_async("async_test_data.parquet") + + assert result.equals(table) + + +@pytest.mark.skipif(not OBSTORE_INSTALLED or not PYARROW_INSTALLED, reason="obstore or PyArrow not installed") +async def test_async_stream_arrow() -> None: + """Test async stream Arrow record batches.""" + import pyarrow as pa + + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + + # Create test Arrow table + data = {"id": [1, 2, 3, 4, 5, 6], "value": ["a", "b", "c", "d", "e", "f"]} + table = pa.table(data) + + await store.write_arrow_async("async_stream_test.parquet", table) + + # Stream record batches + batches = [batch async for batch in store.stream_arrow_async("async_stream_test.parquet")] + + assert len(batches) > 0 + + # Verify we can read the data + reconstructed = pa.Table.from_batches(batches) + assert reconstructed.equals(table) + + +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +async def test_async_sign() -> None: + """Test async sign returns URI for files.""" + from sqlspec.storage.backends.obstore import ObStoreBackend + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + + await store.write_text_async("async_test.txt", "content") + signed_url = await store.sign_async("async_test.txt") + + assert "async_test.txt" in signed_url + + +def test_obstore_operations_without_obstore() -> None: + """Test operations raise proper error without obstore.""" + if OBSTORE_INSTALLED: + pytest.skip("obstore is installed") + + with pytest.raises(MissingDependencyError, match="obstore"): + ObStoreBackend("file:///tmp") # type: ignore + + +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +def test_arrow_operations_without_pyarrow() -> None: + """Test Arrow operations raise proper error without PyArrow.""" + from sqlspec.storage.backends.obstore import ObStoreBackend + + if PYARROW_INSTALLED: + pytest.skip("PyArrow is installed") + + with tempfile.TemporaryDirectory() as temp_dir: + store = ObStoreBackend(f"file://{temp_dir}") + + with pytest.raises(MissingDependencyError, match="pyarrow"): + store.read_arrow("test.parquet") + + with pytest.raises(MissingDependencyError, match="pyarrow"): + store.write_arrow("test.parquet", None) # type: ignore + + with pytest.raises(MissingDependencyError, match="pyarrow"): + list(store.stream_arrow("*.parquet")) diff --git a/tests/unit/test_storage/test_storage_registry.py b/tests/unit/test_storage/test_storage_registry.py new file mode 100644 index 000000000..259db3a61 --- /dev/null +++ b/tests/unit/test_storage/test_storage_registry.py @@ -0,0 +1,238 @@ +"""Unit tests for StorageRegistry.""" + +import tempfile +from pathlib import Path + +import pytest + +from sqlspec.exceptions import ImproperConfigurationError, MissingDependencyError +from sqlspec.storage.registry import StorageRegistry, _is_local_uri +from sqlspec.typing import FSSPEC_INSTALLED, OBSTORE_INSTALLED + + +def test_is_local_uri() -> 
None: + """Test _is_local_uri type guard function.""" + # Absolute paths + assert _is_local_uri("/absolute/path") + assert _is_local_uri("C:\\Windows\\path") + + # Relative paths + assert _is_local_uri("./relative/path") + assert _is_local_uri("../parent/path") + assert _is_local_uri("~/home/path") + assert _is_local_uri("relative/path") + + # URIs should return False + assert not _is_local_uri("s3://bucket/key") + assert not _is_local_uri("https://example.com") + assert not _is_local_uri("gs://bucket") + + +def test_registry_init() -> None: + """Test registry initialization.""" + registry = StorageRegistry() + assert len(registry.list_aliases()) == 0 + + +def test_register_alias() -> None: + """Test alias registration.""" + registry = StorageRegistry() + + registry.register_alias("test_store", "file:///tmp/test") + assert registry.is_alias_registered("test_store") + assert "test_store" in registry.list_aliases() + + +def test_get_local_backend() -> None: + """Test getting local backend.""" + with tempfile.TemporaryDirectory() as temp_dir: + registry = StorageRegistry() + + # Test direct path + backend = registry.get(temp_dir) + assert backend.backend_type == "local" + + # Test file:// URI + backend = registry.get(f"file://{temp_dir}") + assert backend.backend_type == "local" + + +def test_get_alias() -> None: + """Test getting backend by alias.""" + with tempfile.TemporaryDirectory() as temp_dir: + registry = StorageRegistry() + registry.register_alias("my_store", f"file://{temp_dir}") + + backend = registry.get("my_store") + assert backend.backend_type == "local" + + +def test_get_with_backend_override() -> None: + """Test getting backend with override.""" + with tempfile.TemporaryDirectory() as temp_dir: + registry = StorageRegistry() + + # Force local backend + backend = registry.get(f"file://{temp_dir}", backend="local") + assert backend.backend_type == "local" + + +@pytest.mark.skipif(not FSSPEC_INSTALLED, reason="fsspec not installed") +def test_get_fsspec_backend() -> None: + """Test getting fsspec backend.""" + with tempfile.TemporaryDirectory() as temp_dir: + registry = StorageRegistry() + + backend = registry.get(f"file://{temp_dir}", backend="fsspec") + assert backend.backend_type == "fsspec" + + +@pytest.mark.skipif(not OBSTORE_INSTALLED, reason="obstore not installed") +def test_get_obstore_backend() -> None: + """Test getting obstore backend.""" + with tempfile.TemporaryDirectory() as temp_dir: + registry = StorageRegistry() + + backend = registry.get(f"file://{temp_dir}", backend="obstore") + assert backend.backend_type == "obstore" + + +def test_get_invalid_alias_raises_error() -> None: + """Test getting invalid alias raises error.""" + registry = StorageRegistry() + + with pytest.raises(ImproperConfigurationError, match="Unknown storage alias"): + registry.get("nonexistent_alias") + + +def test_get_empty_uri_raises_error() -> None: + """Test getting empty URI raises error.""" + registry = StorageRegistry() + + with pytest.raises(ImproperConfigurationError, match="URI or alias cannot be empty"): + registry.get("") + + +def test_get_invalid_backend_raises_error() -> None: + """Test getting invalid backend type raises error.""" + registry = StorageRegistry() + + with pytest.raises(ValueError, match="Unknown backend type"): + registry.get("file:///tmp", backend="invalid") + + +def test_register_alias_with_base_path() -> None: + """Test alias registration with base_path.""" + with tempfile.TemporaryDirectory() as temp_dir: + registry = StorageRegistry() + + 
registry.register_alias("test_store", f"file://{temp_dir}/data") + backend = registry.get("test_store") + + # Write and read to verify base_path works + backend.write_text("test.txt", "content") + assert backend.exists("test.txt") + + +def test_register_alias_with_backend_override() -> None: + """Test alias registration with backend override.""" + with tempfile.TemporaryDirectory() as temp_dir: + registry = StorageRegistry() + + registry.register_alias("test_store", f"file://{temp_dir}", backend="local") + backend = registry.get("test_store") + assert backend.backend_type == "local" + + +def test_cache_functionality() -> None: + """Test registry caching.""" + with tempfile.TemporaryDirectory() as temp_dir: + registry = StorageRegistry() + + # Get same backend twice + backend1 = registry.get(f"file://{temp_dir}") + backend2 = registry.get(f"file://{temp_dir}") + + # Should be the same instance + assert backend1 is backend2 + + +def test_clear_cache() -> None: + """Test cache clearing.""" + with tempfile.TemporaryDirectory() as temp_dir: + registry = StorageRegistry() + + backend1 = registry.get(f"file://{temp_dir}") + registry.clear_cache(f"file://{temp_dir}") + backend2 = registry.get(f"file://{temp_dir}") + + # Should be different instances after cache clear + assert backend1 is not backend2 + + +def test_clear_aliases() -> None: + """Test clearing aliases.""" + registry = StorageRegistry() + + registry.register_alias("test_store", "file:///tmp") + assert registry.is_alias_registered("test_store") + + registry.clear_aliases() + assert not registry.is_alias_registered("test_store") + assert len(registry.list_aliases()) == 0 + + +def test_clear_instances() -> None: + """Test clearing instances.""" + with tempfile.TemporaryDirectory() as temp_dir: + registry = StorageRegistry() + + backend1 = registry.get(f"file://{temp_dir}") + registry.clear_instances() + backend2 = registry.get(f"file://{temp_dir}") + + # Should be different instances after clear + assert backend1 is not backend2 + + +def test_clear_all() -> None: + """Test clearing everything.""" + with tempfile.TemporaryDirectory() as temp_dir: + registry = StorageRegistry() + + registry.register_alias("test_store", f"file://{temp_dir}") + backend1 = registry.get("test_store") + + registry.clear() + + assert not registry.is_alias_registered("test_store") + assert len(registry.list_aliases()) == 0 + + # Should create new instance + registry.register_alias("test_store", f"file://{temp_dir}") + backend2 = registry.get("test_store") + assert backend1 is not backend2 + + +def test_path_object_conversion() -> None: + """Test Path object conversion to file:// URI.""" + with tempfile.TemporaryDirectory() as temp_dir: + registry = StorageRegistry() + path_obj = Path(temp_dir) + + backend = registry.get(path_obj) + assert backend.backend_type == "local" + + +def test_cloud_storage_without_backends() -> None: + """Test cloud storage URIs without backends raise proper errors.""" + if OBSTORE_INSTALLED or FSSPEC_INSTALLED: + pytest.skip("Storage backends are installed") + + registry = StorageRegistry() + + with pytest.raises(MissingDependencyError, match="No backend available"): + registry.get("s3://bucket") + + with pytest.raises(MissingDependencyError, match="No backend available"): + registry.get("gs://bucket") diff --git a/tests/unit/test_utils/test_type_guards.py b/tests/unit/test_utils/test_type_guards.py index 13e031016..1fa9ff3c5 100644 --- a/tests/unit/test_utils/test_type_guards.py +++ b/tests/unit/test_utils/test_type_guards.py @@ -955,7 
+955,7 @@ class InvalidConfigStruct: assert result is None class InvalidConfigStruct2: - __struct_config__ = None + __struct_config__ = None # type: ignore[var-annotated] result = get_msgspec_rename_config(InvalidConfigStruct2) assert result is None diff --git a/uv.lock b/uv.lock index b041ec104..a642120de 100644 --- a/uv.lock +++ b/uv.lock @@ -118,6 +118,24 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b4/18/c857aecc1b80c02bb0b9af8464ef7c250caab2a0120a68f56b4501db32f6/adbc_driver_sqlite-1.7.0-py3-none-win_amd64.whl", hash = "sha256:d70f05a1d737ac477564e8810985101d6e8c6e632f790e396531ece8d3a93248", size = 867977, upload-time = "2025-07-07T06:23:06.155Z" }, ] +[[package]] +name = "aiobotocore" +version = "2.24.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "aioitertools" }, + { name = "botocore" }, + { name = "jmespath" }, + { name = "multidict" }, + { name = "python-dateutil" }, + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1b/02/b4ed1af4b3437c2fc6e6111e7fdee011b34cf1c0cc8f314474f843e10019/aiobotocore-2.24.1.tar.gz", hash = "sha256:59237f1b2d4ff619f9a9e78360b691d59b92fdd4d03d054dbd2eeff8ada5667e", size = 119754, upload-time = "2025-08-15T15:49:53.209Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/26/c3c93209084e24990ad1b4214f67dce1c0183454cec9cd2cad9433f493bb/aiobotocore-2.24.1-py3-none-any.whl", hash = "sha256:557922823455ca65bbd065b363b54846f16b9c4b6bd0b61ecdfa01ca13a04531", size = 85216, upload-time = "2025-08-15T15:49:51.442Z" }, +] + [[package]] name = "aiohappyeyeballs" version = "2.6.1" @@ -230,6 +248,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/14/25/e0cf8793aedc41c6d7f2aad646a27e27bdacafe3b402bb373d7651c94d73/aiohttp-3.12.15-cp39-cp39-win_amd64.whl", hash = "sha256:86ceded4e78a992f835209e236617bffae649371c4a50d5e5a3987f237db84b8", size = 453370, upload-time = "2025-07-29T05:52:29.936Z" }, ] +[[package]] +name = "aioitertools" +version = "0.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/de/38491a84ab323b47c7f86e94d2830e748780525f7a10c8600b67ead7e9ea/aioitertools-0.12.0.tar.gz", hash = "sha256:c2a9055b4fbb7705f561b9d86053e8af5d10cc845d22c32008c43490b2d8dd6b", size = 19369, upload-time = "2024-09-02T03:33:40.349Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/13/58b70a580de00893223d61de8fea167877a3aed97d4a5e1405c9159ef925/aioitertools-0.12.0-py3-none-any.whl", hash = "sha256:fc1f5fac3d737354de8831cbba3eb04f79dd649d8f3afb4c5b114925e662a796", size = 24345, upload-time = "2024-09-02T03:34:59.454Z" }, +] + [[package]] name = "aioodbc" version = "0.5.0" @@ -641,6 +671,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458, upload-time = "2024-11-08T17:25:46.184Z" }, ] +[[package]] +name = "botocore" +version = "1.39.11" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jmespath" }, + { name = "python-dateutil" }, + { name = "urllib3", version = "1.26.20", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "urllib3", version = "2.5.0", source = { registry = 
"https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6d/d0/9d64261186cff650fe63168441edb4f4cd33f085a74c0c54455630a71f91/botocore-1.39.11.tar.gz", hash = "sha256:953b12909d6799350e346ab038e55b6efe622c616f80aef74d7a6683ffdd972c", size = 14217749, upload-time = "2025-07-22T19:26:40.723Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1c/2c/8a0b02d60a1dbbae7faa5af30484b016aa3023f9833dfc0d19b0b770dd6a/botocore-1.39.11-py3-none-any.whl", hash = "sha256:1545352931a8a186f3e977b1e1a4542d7d434796e274c3c62efd0210b5ea76dc", size = 13876276, upload-time = "2025-07-22T19:26:35.164Z" }, +] + [[package]] name = "bracex" version = "2.6" @@ -919,97 +964,97 @@ wheels = [ [[package]] name = "coverage" -version = "7.10.5" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/61/83/153f54356c7c200013a752ce1ed5448573dca546ce125801afca9e1ac1a4/coverage-7.10.5.tar.gz", hash = "sha256:f2e57716a78bc3ae80b2207be0709a3b2b63b9f2dcf9740ee6ac03588a2015b6", size = 821662, upload-time = "2025-08-23T14:42:44.78Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/af/70/e77b0061a6c7157bfce645c6b9a715a08d4c86b3360a7b3252818080b817/coverage-7.10.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c6a5c3414bfc7451b879141ce772c546985163cf553f08e0f135f0699a911801", size = 216774, upload-time = "2025-08-23T14:40:26.301Z" }, - { url = "https://files.pythonhosted.org/packages/91/08/2a79de5ecf37ee40f2d898012306f11c161548753391cec763f92647837b/coverage-7.10.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bc8e4d99ce82f1710cc3c125adc30fd1487d3cf6c2cd4994d78d68a47b16989a", size = 217175, upload-time = "2025-08-23T14:40:29.142Z" }, - { url = "https://files.pythonhosted.org/packages/64/57/0171d69a699690149a6ba6a4eb702814448c8d617cf62dbafa7ce6bfdf63/coverage-7.10.5-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:02252dc1216e512a9311f596b3169fad54abcb13827a8d76d5630c798a50a754", size = 243931, upload-time = "2025-08-23T14:40:30.735Z" }, - { url = "https://files.pythonhosted.org/packages/15/06/3a67662c55656702bd398a727a7f35df598eb11104fcb34f1ecbb070291a/coverage-7.10.5-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:73269df37883e02d460bee0cc16be90509faea1e3bd105d77360b512d5bb9c33", size = 245740, upload-time = "2025-08-23T14:40:32.302Z" }, - { url = "https://files.pythonhosted.org/packages/00/f4/f8763aabf4dc30ef0d0012522d312f0b7f9fede6246a1f27dbcc4a1e523c/coverage-7.10.5-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f8a81b0614642f91c9effd53eec284f965577591f51f547a1cbeb32035b4c2f", size = 247600, upload-time = "2025-08-23T14:40:33.66Z" }, - { url = "https://files.pythonhosted.org/packages/9c/31/6632219a9065e1b83f77eda116fed4c76fb64908a6a9feae41816dab8237/coverage-7.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6a29f8e0adb7f8c2b95fa2d4566a1d6e6722e0a637634c6563cb1ab844427dd9", size = 245640, upload-time = "2025-08-23T14:40:35.248Z" }, - { url = "https://files.pythonhosted.org/packages/6e/e2/3dba9b86037b81649b11d192bb1df11dde9a81013e434af3520222707bc8/coverage-7.10.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fcf6ab569436b4a647d4e91accba12509ad9f2554bc93d3aee23cc596e7f99c3", size = 243659, upload-time = "2025-08-23T14:40:36.815Z" }, - { url = 
"https://files.pythonhosted.org/packages/02/b9/57170bd9f3e333837fc24ecc88bc70fbc2eb7ccfd0876854b0c0407078c3/coverage-7.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:90dc3d6fb222b194a5de60af8d190bedeeddcbc7add317e4a3cd333ee6b7c879", size = 244537, upload-time = "2025-08-23T14:40:38.737Z" }, - { url = "https://files.pythonhosted.org/packages/b3/1c/93ac36ef1e8b06b8d5777393a3a40cb356f9f3dab980be40a6941e443588/coverage-7.10.5-cp310-cp310-win32.whl", hash = "sha256:414a568cd545f9dc75f0686a0049393de8098414b58ea071e03395505b73d7a8", size = 219285, upload-time = "2025-08-23T14:40:40.342Z" }, - { url = "https://files.pythonhosted.org/packages/30/95/23252277e6e5fe649d6cd3ed3f35d2307e5166de4e75e66aa7f432abc46d/coverage-7.10.5-cp310-cp310-win_amd64.whl", hash = "sha256:e551f9d03347196271935fd3c0c165f0e8c049220280c1120de0084d65e9c7ff", size = 220185, upload-time = "2025-08-23T14:40:42.026Z" }, - { url = "https://files.pythonhosted.org/packages/cb/f2/336d34d2fc1291ca7c18eeb46f64985e6cef5a1a7ef6d9c23720c6527289/coverage-7.10.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c177e6ffe2ebc7c410785307758ee21258aa8e8092b44d09a2da767834f075f2", size = 216890, upload-time = "2025-08-23T14:40:43.627Z" }, - { url = "https://files.pythonhosted.org/packages/39/ea/92448b07cc1cf2b429d0ce635f59cf0c626a5d8de21358f11e92174ff2a6/coverage-7.10.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:14d6071c51ad0f703d6440827eaa46386169b5fdced42631d5a5ac419616046f", size = 217287, upload-time = "2025-08-23T14:40:45.214Z" }, - { url = "https://files.pythonhosted.org/packages/96/ba/ad5b36537c5179c808d0ecdf6e4aa7630b311b3c12747ad624dcd43a9b6b/coverage-7.10.5-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:61f78c7c3bc272a410c5ae3fde7792b4ffb4acc03d35a7df73ca8978826bb7ab", size = 247683, upload-time = "2025-08-23T14:40:46.791Z" }, - { url = "https://files.pythonhosted.org/packages/28/e5/fe3bbc8d097029d284b5fb305b38bb3404895da48495f05bff025df62770/coverage-7.10.5-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f39071caa126f69d63f99b324fb08c7b1da2ec28cbb1fe7b5b1799926492f65c", size = 249614, upload-time = "2025-08-23T14:40:48.082Z" }, - { url = "https://files.pythonhosted.org/packages/69/9c/a1c89a8c8712799efccb32cd0a1ee88e452f0c13a006b65bb2271f1ac767/coverage-7.10.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:343a023193f04d46edc46b2616cdbee68c94dd10208ecd3adc56fcc54ef2baa1", size = 251719, upload-time = "2025-08-23T14:40:49.349Z" }, - { url = "https://files.pythonhosted.org/packages/e9/be/5576b5625865aa95b5633315f8f4142b003a70c3d96e76f04487c3b5cc95/coverage-7.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:585ffe93ae5894d1ebdee69fc0b0d4b7c75d8007983692fb300ac98eed146f78", size = 249411, upload-time = "2025-08-23T14:40:50.624Z" }, - { url = "https://files.pythonhosted.org/packages/94/0a/e39a113d4209da0dbbc9385608cdb1b0726a4d25f78672dc51c97cfea80f/coverage-7.10.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b0ef4e66f006ed181df29b59921bd8fc7ed7cd6a9289295cd8b2824b49b570df", size = 247466, upload-time = "2025-08-23T14:40:52.362Z" }, - { url = "https://files.pythonhosted.org/packages/40/cb/aebb2d8c9e3533ee340bea19b71c5b76605a0268aa49808e26fe96ec0a07/coverage-7.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:eb7b0bbf7cc1d0453b843eca7b5fa017874735bef9bfdfa4121373d2cc885ed6", size = 248104, upload-time = "2025-08-23T14:40:54.064Z" }, - { url = 
"https://files.pythonhosted.org/packages/08/e6/26570d6ccce8ff5de912cbfd268e7f475f00597cb58da9991fa919c5e539/coverage-7.10.5-cp311-cp311-win32.whl", hash = "sha256:1d043a8a06987cc0c98516e57c4d3fc2c1591364831e9deb59c9e1b4937e8caf", size = 219327, upload-time = "2025-08-23T14:40:55.424Z" }, - { url = "https://files.pythonhosted.org/packages/79/79/5f48525e366e518b36e66167e3b6e5db6fd54f63982500c6a5abb9d3dfbd/coverage-7.10.5-cp311-cp311-win_amd64.whl", hash = "sha256:fefafcca09c3ac56372ef64a40f5fe17c5592fab906e0fdffd09543f3012ba50", size = 220213, upload-time = "2025-08-23T14:40:56.724Z" }, - { url = "https://files.pythonhosted.org/packages/40/3c/9058128b7b0bf333130c320b1eb1ae485623014a21ee196d68f7737f8610/coverage-7.10.5-cp311-cp311-win_arm64.whl", hash = "sha256:7e78b767da8b5fc5b2faa69bb001edafcd6f3995b42a331c53ef9572c55ceb82", size = 218893, upload-time = "2025-08-23T14:40:58.011Z" }, - { url = "https://files.pythonhosted.org/packages/27/8e/40d75c7128f871ea0fd829d3e7e4a14460cad7c3826e3b472e6471ad05bd/coverage-7.10.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c2d05c7e73c60a4cecc7d9b60dbfd603b4ebc0adafaef371445b47d0f805c8a9", size = 217077, upload-time = "2025-08-23T14:40:59.329Z" }, - { url = "https://files.pythonhosted.org/packages/18/a8/f333f4cf3fb5477a7f727b4d603a2eb5c3c5611c7fe01329c2e13b23b678/coverage-7.10.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:32ddaa3b2c509778ed5373b177eb2bf5662405493baeff52278a0b4f9415188b", size = 217310, upload-time = "2025-08-23T14:41:00.628Z" }, - { url = "https://files.pythonhosted.org/packages/ec/2c/fbecd8381e0a07d1547922be819b4543a901402f63930313a519b937c668/coverage-7.10.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dd382410039fe062097aa0292ab6335a3f1e7af7bba2ef8d27dcda484918f20c", size = 248802, upload-time = "2025-08-23T14:41:02.012Z" }, - { url = "https://files.pythonhosted.org/packages/3f/bc/1011da599b414fb6c9c0f34086736126f9ff71f841755786a6b87601b088/coverage-7.10.5-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7fa22800f3908df31cea6fb230f20ac49e343515d968cc3a42b30d5c3ebf9b5a", size = 251550, upload-time = "2025-08-23T14:41:03.438Z" }, - { url = "https://files.pythonhosted.org/packages/4c/6f/b5c03c0c721c067d21bc697accc3642f3cef9f087dac429c918c37a37437/coverage-7.10.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f366a57ac81f5e12797136552f5b7502fa053c861a009b91b80ed51f2ce651c6", size = 252684, upload-time = "2025-08-23T14:41:04.85Z" }, - { url = "https://files.pythonhosted.org/packages/f9/50/d474bc300ebcb6a38a1047d5c465a227605d6473e49b4e0d793102312bc5/coverage-7.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5f1dc8f1980a272ad4a6c84cba7981792344dad33bf5869361576b7aef42733a", size = 250602, upload-time = "2025-08-23T14:41:06.719Z" }, - { url = "https://files.pythonhosted.org/packages/4a/2d/548c8e04249cbba3aba6bd799efdd11eee3941b70253733f5d355d689559/coverage-7.10.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2285c04ee8676f7938b02b4936d9b9b672064daab3187c20f73a55f3d70e6b4a", size = 248724, upload-time = "2025-08-23T14:41:08.429Z" }, - { url = "https://files.pythonhosted.org/packages/e2/96/a7c3c0562266ac39dcad271d0eec8fc20ab576e3e2f64130a845ad2a557b/coverage-7.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c2492e4dd9daab63f5f56286f8a04c51323d237631eb98505d87e4c4ff19ec34", size = 250158, upload-time = "2025-08-23T14:41:09.749Z" }, - { url = 
"https://files.pythonhosted.org/packages/f3/75/74d4be58c70c42ef0b352d597b022baf12dbe2b43e7cb1525f56a0fb1d4b/coverage-7.10.5-cp312-cp312-win32.whl", hash = "sha256:38a9109c4ee8135d5df5505384fc2f20287a47ccbe0b3f04c53c9a1989c2bbaf", size = 219493, upload-time = "2025-08-23T14:41:11.095Z" }, - { url = "https://files.pythonhosted.org/packages/4f/08/364e6012d1d4d09d1e27437382967efed971d7613f94bca9add25f0c1f2b/coverage-7.10.5-cp312-cp312-win_amd64.whl", hash = "sha256:6b87f1ad60b30bc3c43c66afa7db6b22a3109902e28c5094957626a0143a001f", size = 220302, upload-time = "2025-08-23T14:41:12.449Z" }, - { url = "https://files.pythonhosted.org/packages/db/d5/7c8a365e1f7355c58af4fe5faf3f90cc8e587590f5854808d17ccb4e7077/coverage-7.10.5-cp312-cp312-win_arm64.whl", hash = "sha256:672a6c1da5aea6c629819a0e1461e89d244f78d7b60c424ecf4f1f2556c041d8", size = 218936, upload-time = "2025-08-23T14:41:13.872Z" }, - { url = "https://files.pythonhosted.org/packages/9f/08/4166ecfb60ba011444f38a5a6107814b80c34c717bc7a23be0d22e92ca09/coverage-7.10.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ef3b83594d933020f54cf65ea1f4405d1f4e41a009c46df629dd964fcb6e907c", size = 217106, upload-time = "2025-08-23T14:41:15.268Z" }, - { url = "https://files.pythonhosted.org/packages/25/d7/b71022408adbf040a680b8c64bf6ead3be37b553e5844f7465643979f7ca/coverage-7.10.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2b96bfdf7c0ea9faebce088a3ecb2382819da4fbc05c7b80040dbc428df6af44", size = 217353, upload-time = "2025-08-23T14:41:16.656Z" }, - { url = "https://files.pythonhosted.org/packages/74/68/21e0d254dbf8972bb8dd95e3fe7038f4be037ff04ba47d6d1b12b37510ba/coverage-7.10.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:63df1fdaffa42d914d5c4d293e838937638bf75c794cf20bee12978fc8c4e3bc", size = 248350, upload-time = "2025-08-23T14:41:18.128Z" }, - { url = "https://files.pythonhosted.org/packages/90/65/28752c3a896566ec93e0219fc4f47ff71bd2b745f51554c93e8dcb659796/coverage-7.10.5-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8002dc6a049aac0e81ecec97abfb08c01ef0c1fbf962d0c98da3950ace89b869", size = 250955, upload-time = "2025-08-23T14:41:19.577Z" }, - { url = "https://files.pythonhosted.org/packages/a5/eb/ca6b7967f57f6fef31da8749ea20417790bb6723593c8cd98a987be20423/coverage-7.10.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:63d4bb2966d6f5f705a6b0c6784c8969c468dbc4bcf9d9ded8bff1c7e092451f", size = 252230, upload-time = "2025-08-23T14:41:20.959Z" }, - { url = "https://files.pythonhosted.org/packages/bc/29/17a411b2a2a18f8b8c952aa01c00f9284a1fbc677c68a0003b772ea89104/coverage-7.10.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1f672efc0731a6846b157389b6e6d5d5e9e59d1d1a23a5c66a99fd58339914d5", size = 250387, upload-time = "2025-08-23T14:41:22.644Z" }, - { url = "https://files.pythonhosted.org/packages/c7/89/97a9e271188c2fbb3db82235c33980bcbc733da7da6065afbaa1d685a169/coverage-7.10.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:3f39cef43d08049e8afc1fde4a5da8510fc6be843f8dea350ee46e2a26b2f54c", size = 248280, upload-time = "2025-08-23T14:41:24.061Z" }, - { url = "https://files.pythonhosted.org/packages/d1/c6/0ad7d0137257553eb4706b4ad6180bec0a1b6a648b092c5bbda48d0e5b2c/coverage-7.10.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2968647e3ed5a6c019a419264386b013979ff1fb67dd11f5c9886c43d6a31fc2", size = 249894, upload-time = "2025-08-23T14:41:26.165Z" }, - { url = 
"https://files.pythonhosted.org/packages/84/56/fb3aba936addb4c9e5ea14f5979393f1c2466b4c89d10591fd05f2d6b2aa/coverage-7.10.5-cp313-cp313-win32.whl", hash = "sha256:0d511dda38595b2b6934c2b730a1fd57a3635c6aa2a04cb74714cdfdd53846f4", size = 219536, upload-time = "2025-08-23T14:41:27.694Z" }, - { url = "https://files.pythonhosted.org/packages/fc/54/baacb8f2f74431e3b175a9a2881feaa8feb6e2f187a0e7e3046f3c7742b2/coverage-7.10.5-cp313-cp313-win_amd64.whl", hash = "sha256:9a86281794a393513cf117177fd39c796b3f8e3759bb2764259a2abba5cce54b", size = 220330, upload-time = "2025-08-23T14:41:29.081Z" }, - { url = "https://files.pythonhosted.org/packages/64/8a/82a3788f8e31dee51d350835b23d480548ea8621f3effd7c3ba3f7e5c006/coverage-7.10.5-cp313-cp313-win_arm64.whl", hash = "sha256:cebd8e906eb98bb09c10d1feed16096700b1198d482267f8bf0474e63a7b8d84", size = 218961, upload-time = "2025-08-23T14:41:30.511Z" }, - { url = "https://files.pythonhosted.org/packages/d8/a1/590154e6eae07beee3b111cc1f907c30da6fc8ce0a83ef756c72f3c7c748/coverage-7.10.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0520dff502da5e09d0d20781df74d8189ab334a1e40d5bafe2efaa4158e2d9e7", size = 217819, upload-time = "2025-08-23T14:41:31.962Z" }, - { url = "https://files.pythonhosted.org/packages/0d/ff/436ffa3cfc7741f0973c5c89405307fe39b78dcf201565b934e6616fc4ad/coverage-7.10.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d9cd64aca68f503ed3f1f18c7c9174cbb797baba02ca8ab5112f9d1c0328cd4b", size = 218040, upload-time = "2025-08-23T14:41:33.472Z" }, - { url = "https://files.pythonhosted.org/packages/a0/ca/5787fb3d7820e66273913affe8209c534ca11241eb34ee8c4fd2aaa9dd87/coverage-7.10.5-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0913dd1613a33b13c4f84aa6e3f4198c1a21ee28ccb4f674985c1f22109f0aae", size = 259374, upload-time = "2025-08-23T14:41:34.914Z" }, - { url = "https://files.pythonhosted.org/packages/b5/89/21af956843896adc2e64fc075eae3c1cadb97ee0a6960733e65e696f32dd/coverage-7.10.5-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1b7181c0feeb06ed8a02da02792f42f829a7b29990fef52eff257fef0885d760", size = 261551, upload-time = "2025-08-23T14:41:36.333Z" }, - { url = "https://files.pythonhosted.org/packages/e1/96/390a69244ab837e0ac137989277879a084c786cf036c3c4a3b9637d43a89/coverage-7.10.5-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36d42b7396b605f774d4372dd9c49bed71cbabce4ae1ccd074d155709dd8f235", size = 263776, upload-time = "2025-08-23T14:41:38.25Z" }, - { url = "https://files.pythonhosted.org/packages/00/32/cfd6ae1da0a521723349f3129b2455832fc27d3f8882c07e5b6fefdd0da2/coverage-7.10.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b4fdc777e05c4940b297bf47bf7eedd56a39a61dc23ba798e4b830d585486ca5", size = 261326, upload-time = "2025-08-23T14:41:40.343Z" }, - { url = "https://files.pythonhosted.org/packages/4c/c4/bf8d459fb4ce2201e9243ce6c015936ad283a668774430a3755f467b39d1/coverage-7.10.5-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:42144e8e346de44a6f1dbd0a56575dd8ab8dfa7e9007da02ea5b1c30ab33a7db", size = 259090, upload-time = "2025-08-23T14:41:42.106Z" }, - { url = "https://files.pythonhosted.org/packages/f4/5d/a234f7409896468e5539d42234016045e4015e857488b0b5b5f3f3fa5f2b/coverage-7.10.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:66c644cbd7aed8fe266d5917e2c9f65458a51cfe5eeff9c05f15b335f697066e", size = 260217, upload-time = "2025-08-23T14:41:43.591Z" }, - { url = 
"https://files.pythonhosted.org/packages/f3/ad/87560f036099f46c2ddd235be6476dd5c1d6be6bb57569a9348d43eeecea/coverage-7.10.5-cp313-cp313t-win32.whl", hash = "sha256:2d1b73023854068c44b0c554578a4e1ef1b050ed07cf8b431549e624a29a66ee", size = 220194, upload-time = "2025-08-23T14:41:45.051Z" }, - { url = "https://files.pythonhosted.org/packages/36/a8/04a482594fdd83dc677d4a6c7e2d62135fff5a1573059806b8383fad9071/coverage-7.10.5-cp313-cp313t-win_amd64.whl", hash = "sha256:54a1532c8a642d8cc0bd5a9a51f5a9dcc440294fd06e9dda55e743c5ec1a8f14", size = 221258, upload-time = "2025-08-23T14:41:46.44Z" }, - { url = "https://files.pythonhosted.org/packages/eb/ad/7da28594ab66fe2bc720f1bc9b131e62e9b4c6e39f044d9a48d18429cc21/coverage-7.10.5-cp313-cp313t-win_arm64.whl", hash = "sha256:74d5b63fe3f5f5d372253a4ef92492c11a4305f3550631beaa432fc9df16fcff", size = 219521, upload-time = "2025-08-23T14:41:47.882Z" }, - { url = "https://files.pythonhosted.org/packages/d3/7f/c8b6e4e664b8a95254c35a6c8dd0bf4db201ec681c169aae2f1256e05c85/coverage-7.10.5-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:68c5e0bc5f44f68053369fa0d94459c84548a77660a5f2561c5e5f1e3bed7031", size = 217090, upload-time = "2025-08-23T14:41:49.327Z" }, - { url = "https://files.pythonhosted.org/packages/44/74/3ee14ede30a6e10a94a104d1d0522d5fb909a7c7cac2643d2a79891ff3b9/coverage-7.10.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cf33134ffae93865e32e1e37df043bef15a5e857d8caebc0099d225c579b0fa3", size = 217365, upload-time = "2025-08-23T14:41:50.796Z" }, - { url = "https://files.pythonhosted.org/packages/41/5f/06ac21bf87dfb7620d1f870dfa3c2cae1186ccbcdc50b8b36e27a0d52f50/coverage-7.10.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ad8fa9d5193bafcf668231294241302b5e683a0518bf1e33a9a0dfb142ec3031", size = 248413, upload-time = "2025-08-23T14:41:52.5Z" }, - { url = "https://files.pythonhosted.org/packages/21/bc/cc5bed6e985d3a14228539631573f3863be6a2587381e8bc5fdf786377a1/coverage-7.10.5-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:146fa1531973d38ab4b689bc764592fe6c2f913e7e80a39e7eeafd11f0ef6db2", size = 250943, upload-time = "2025-08-23T14:41:53.922Z" }, - { url = "https://files.pythonhosted.org/packages/8d/43/6a9fc323c2c75cd80b18d58db4a25dc8487f86dd9070f9592e43e3967363/coverage-7.10.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6013a37b8a4854c478d3219ee8bc2392dea51602dd0803a12d6f6182a0061762", size = 252301, upload-time = "2025-08-23T14:41:56.528Z" }, - { url = "https://files.pythonhosted.org/packages/69/7c/3e791b8845f4cd515275743e3775adb86273576596dc9f02dca37357b4f2/coverage-7.10.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:eb90fe20db9c3d930fa2ad7a308207ab5b86bf6a76f54ab6a40be4012d88fcae", size = 250302, upload-time = "2025-08-23T14:41:58.171Z" }, - { url = "https://files.pythonhosted.org/packages/5c/bc/5099c1e1cb0c9ac6491b281babea6ebbf999d949bf4aa8cdf4f2b53505e8/coverage-7.10.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:384b34482272e960c438703cafe63316dfbea124ac62006a455c8410bf2a2262", size = 248237, upload-time = "2025-08-23T14:41:59.703Z" }, - { url = "https://files.pythonhosted.org/packages/7e/51/d346eb750a0b2f1e77f391498b753ea906fde69cc11e4b38dca28c10c88c/coverage-7.10.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:467dc74bd0a1a7de2bedf8deaf6811f43602cb532bd34d81ffd6038d6d8abe99", size = 249726, upload-time = "2025-08-23T14:42:01.343Z" }, - { url = 
"https://files.pythonhosted.org/packages/a3/85/eebcaa0edafe427e93286b94f56ea7e1280f2c49da0a776a6f37e04481f9/coverage-7.10.5-cp314-cp314-win32.whl", hash = "sha256:556d23d4e6393ca898b2e63a5bca91e9ac2d5fb13299ec286cd69a09a7187fde", size = 219825, upload-time = "2025-08-23T14:42:03.263Z" }, - { url = "https://files.pythonhosted.org/packages/3c/f7/6d43e037820742603f1e855feb23463979bf40bd27d0cde1f761dcc66a3e/coverage-7.10.5-cp314-cp314-win_amd64.whl", hash = "sha256:f4446a9547681533c8fa3e3c6cf62121eeee616e6a92bd9201c6edd91beffe13", size = 220618, upload-time = "2025-08-23T14:42:05.037Z" }, - { url = "https://files.pythonhosted.org/packages/4a/b0/ed9432e41424c51509d1da603b0393404b828906236fb87e2c8482a93468/coverage-7.10.5-cp314-cp314-win_arm64.whl", hash = "sha256:5e78bd9cf65da4c303bf663de0d73bf69f81e878bf72a94e9af67137c69b9fe9", size = 219199, upload-time = "2025-08-23T14:42:06.662Z" }, - { url = "https://files.pythonhosted.org/packages/2f/54/5a7ecfa77910f22b659c820f67c16fc1e149ed132ad7117f0364679a8fa9/coverage-7.10.5-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:5661bf987d91ec756a47c7e5df4fbcb949f39e32f9334ccd3f43233bbb65e508", size = 217833, upload-time = "2025-08-23T14:42:08.262Z" }, - { url = "https://files.pythonhosted.org/packages/4e/0e/25672d917cc57857d40edf38f0b867fb9627115294e4f92c8fcbbc18598d/coverage-7.10.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a46473129244db42a720439a26984f8c6f834762fc4573616c1f37f13994b357", size = 218048, upload-time = "2025-08-23T14:42:10.247Z" }, - { url = "https://files.pythonhosted.org/packages/cb/7c/0b2b4f1c6f71885d4d4b2b8608dcfc79057adb7da4143eb17d6260389e42/coverage-7.10.5-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1f64b8d3415d60f24b058b58d859e9512624bdfa57a2d1f8aff93c1ec45c429b", size = 259549, upload-time = "2025-08-23T14:42:11.811Z" }, - { url = "https://files.pythonhosted.org/packages/94/73/abb8dab1609abec7308d83c6aec547944070526578ee6c833d2da9a0ad42/coverage-7.10.5-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:44d43de99a9d90b20e0163f9770542357f58860a26e24dc1d924643bd6aa7cb4", size = 261715, upload-time = "2025-08-23T14:42:13.505Z" }, - { url = "https://files.pythonhosted.org/packages/0b/d1/abf31de21ec92731445606b8d5e6fa5144653c2788758fcf1f47adb7159a/coverage-7.10.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a931a87e5ddb6b6404e65443b742cb1c14959622777f2a4efd81fba84f5d91ba", size = 263969, upload-time = "2025-08-23T14:42:15.422Z" }, - { url = "https://files.pythonhosted.org/packages/9c/b3/ef274927f4ebede96056173b620db649cc9cb746c61ffc467946b9d0bc67/coverage-7.10.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f9559b906a100029274448f4c8b8b0a127daa4dade5661dfd821b8c188058842", size = 261408, upload-time = "2025-08-23T14:42:16.971Z" }, - { url = "https://files.pythonhosted.org/packages/20/fc/83ca2812be616d69b4cdd4e0c62a7bc526d56875e68fd0f79d47c7923584/coverage-7.10.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b08801e25e3b4526ef9ced1aa29344131a8f5213c60c03c18fe4c6170ffa2874", size = 259168, upload-time = "2025-08-23T14:42:18.512Z" }, - { url = "https://files.pythonhosted.org/packages/fc/4f/e0779e5716f72d5c9962e709d09815d02b3b54724e38567308304c3fc9df/coverage-7.10.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ed9749bb8eda35f8b636fb7632f1c62f735a236a5d4edadd8bbcc5ea0542e732", size = 260317, upload-time = "2025-08-23T14:42:20.005Z" }, - { url = 
"https://files.pythonhosted.org/packages/2b/fe/4247e732f2234bb5eb9984a0888a70980d681f03cbf433ba7b48f08ca5d5/coverage-7.10.5-cp314-cp314t-win32.whl", hash = "sha256:609b60d123fc2cc63ccee6d17e4676699075db72d14ac3c107cc4976d516f2df", size = 220600, upload-time = "2025-08-23T14:42:22.027Z" }, - { url = "https://files.pythonhosted.org/packages/a7/a0/f294cff6d1034b87839987e5b6ac7385bec599c44d08e0857ac7f164ad0c/coverage-7.10.5-cp314-cp314t-win_amd64.whl", hash = "sha256:0666cf3d2c1626b5a3463fd5b05f5e21f99e6aec40a3192eee4d07a15970b07f", size = 221714, upload-time = "2025-08-23T14:42:23.616Z" }, - { url = "https://files.pythonhosted.org/packages/23/18/fa1afdc60b5528d17416df440bcbd8fd12da12bfea9da5b6ae0f7a37d0f7/coverage-7.10.5-cp314-cp314t-win_arm64.whl", hash = "sha256:bc85eb2d35e760120540afddd3044a5bf69118a91a296a8b3940dfc4fdcfe1e2", size = 219735, upload-time = "2025-08-23T14:42:25.156Z" }, - { url = "https://files.pythonhosted.org/packages/3b/21/05248e8bc74683488cb7477e6b6b878decadd15af0ec96f56381d3d7ff2d/coverage-7.10.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:62835c1b00c4a4ace24c1a88561a5a59b612fbb83a525d1c70ff5720c97c0610", size = 216763, upload-time = "2025-08-23T14:42:26.75Z" }, - { url = "https://files.pythonhosted.org/packages/a9/7f/161a0ad40cb1c7e19dc1aae106d3430cc88dac3d651796d6cf3f3730c800/coverage-7.10.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5255b3bbcc1d32a4069d6403820ac8e6dbcc1d68cb28a60a1ebf17e47028e898", size = 217154, upload-time = "2025-08-23T14:42:28.238Z" }, - { url = "https://files.pythonhosted.org/packages/de/31/41929ee53af829ea5a88e71d335ea09d0bb587a3da1c5e58e59b48473ed8/coverage-7.10.5-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3876385722e335d6e991c430302c24251ef9c2a9701b2b390f5473199b1b8ebf", size = 243588, upload-time = "2025-08-23T14:42:29.798Z" }, - { url = "https://files.pythonhosted.org/packages/6e/4e/2649344e33eeb3567041e8255a1942173cae81817fe06b60f3fafaafe111/coverage-7.10.5-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8048ce4b149c93447a55d279078c8ae98b08a6951a3c4d2d7e87f4efc7bfe100", size = 245412, upload-time = "2025-08-23T14:42:31.296Z" }, - { url = "https://files.pythonhosted.org/packages/ac/b1/b21e1e69986ad89b051dd42c3ef06d9326e03ac3c0c844fc33385d1d9e35/coverage-7.10.5-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4028e7558e268dd8bcf4d9484aad393cafa654c24b4885f6f9474bf53183a82a", size = 247182, upload-time = "2025-08-23T14:42:33.155Z" }, - { url = "https://files.pythonhosted.org/packages/4c/b5/80837be411ae092e03fcc2a7877bd9a659c531eff50453e463057a9eee44/coverage-7.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03f47dc870eec0367fcdd603ca6a01517d2504e83dc18dbfafae37faec66129a", size = 245066, upload-time = "2025-08-23T14:42:34.754Z" }, - { url = "https://files.pythonhosted.org/packages/c0/ed/fcb0838ddf149d68d09f89af57397b0dd9d26b100cc729daf1b0caf0b2d3/coverage-7.10.5-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2d488d7d42b6ded7ea0704884f89dcabd2619505457de8fc9a6011c62106f6e5", size = 243138, upload-time = "2025-08-23T14:42:36.311Z" }, - { url = "https://files.pythonhosted.org/packages/75/0f/505c6af24a9ae5d8919d209b9c31b7092815f468fa43bec3b1118232c62a/coverage-7.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b3dcf2ead47fa8be14224ee817dfc1df98043af568fe120a22f81c0eb3c34ad2", size = 244095, upload-time = "2025-08-23T14:42:38.227Z" }, - { url = 
"https://files.pythonhosted.org/packages/e4/7e/c82a8bede46217c1d944bd19b65e7106633b998640f00ab49c5f747a5844/coverage-7.10.5-cp39-cp39-win32.whl", hash = "sha256:02650a11324b80057b8c9c29487020073d5e98a498f1857f37e3f9b6ea1b2426", size = 219289, upload-time = "2025-08-23T14:42:39.827Z" }, - { url = "https://files.pythonhosted.org/packages/9a/ac/46645ef6be543f2e7de08cc2601a0b67e130c816be3b749ab741be689fb9/coverage-7.10.5-cp39-cp39-win_amd64.whl", hash = "sha256:b45264dd450a10f9e03237b41a9a24e85cbb1e278e5a32adb1a303f58f0017f3", size = 220199, upload-time = "2025-08-23T14:42:41.363Z" }, - { url = "https://files.pythonhosted.org/packages/08/b6/fff6609354deba9aeec466e4bcaeb9d1ed3e5d60b14b57df2a36fb2273f2/coverage-7.10.5-py3-none-any.whl", hash = "sha256:0be24d35e4db1d23d0db5c0f6a74a962e2ec83c426b5cac09f4234aadef38e4a", size = 208736, upload-time = "2025-08-23T14:42:43.145Z" }, +version = "7.10.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/14/70/025b179c993f019105b79575ac6edb5e084fb0f0e63f15cdebef4e454fb5/coverage-7.10.6.tar.gz", hash = "sha256:f644a3ae5933a552a29dbb9aa2f90c677a875f80ebea028e5a52a4f429044b90", size = 823736, upload-time = "2025-08-29T15:35:16.668Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/1d/2e64b43d978b5bd184e0756a41415597dfef30fcbd90b747474bd749d45f/coverage-7.10.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:70e7bfbd57126b5554aa482691145f798d7df77489a177a6bef80de78860a356", size = 217025, upload-time = "2025-08-29T15:32:57.169Z" }, + { url = "https://files.pythonhosted.org/packages/23/62/b1e0f513417c02cc10ef735c3ee5186df55f190f70498b3702d516aad06f/coverage-7.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e41be6f0f19da64af13403e52f2dec38bbc2937af54df8ecef10850ff8d35301", size = 217419, upload-time = "2025-08-29T15:32:59.908Z" }, + { url = "https://files.pythonhosted.org/packages/e7/16/b800640b7a43e7c538429e4d7223e0a94fd72453a1a048f70bf766f12e96/coverage-7.10.6-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c61fc91ab80b23f5fddbee342d19662f3d3328173229caded831aa0bd7595460", size = 244180, upload-time = "2025-08-29T15:33:01.608Z" }, + { url = "https://files.pythonhosted.org/packages/fb/6f/5e03631c3305cad187eaf76af0b559fff88af9a0b0c180d006fb02413d7a/coverage-7.10.6-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10356fdd33a7cc06e8051413140bbdc6f972137508a3572e3f59f805cd2832fd", size = 245992, upload-time = "2025-08-29T15:33:03.239Z" }, + { url = "https://files.pythonhosted.org/packages/eb/a1/f30ea0fb400b080730125b490771ec62b3375789f90af0bb68bfb8a921d7/coverage-7.10.6-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:80b1695cf7c5ebe7b44bf2521221b9bb8cdf69b1f24231149a7e3eb1ae5fa2fb", size = 247851, upload-time = "2025-08-29T15:33:04.603Z" }, + { url = "https://files.pythonhosted.org/packages/02/8e/cfa8fee8e8ef9a6bb76c7bef039f3302f44e615d2194161a21d3d83ac2e9/coverage-7.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2e4c33e6378b9d52d3454bd08847a8651f4ed23ddbb4a0520227bd346382bbc6", size = 245891, upload-time = "2025-08-29T15:33:06.176Z" }, + { url = "https://files.pythonhosted.org/packages/93/a9/51be09b75c55c4f6c16d8d73a6a1d46ad764acca0eab48fa2ffaef5958fe/coverage-7.10.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c8a3ec16e34ef980a46f60dc6ad86ec60f763c3f2fa0db6d261e6e754f72e945", size = 243909, upload-time = 
"2025-08-29T15:33:07.74Z" }, + { url = "https://files.pythonhosted.org/packages/e9/a6/ba188b376529ce36483b2d585ca7bdac64aacbe5aa10da5978029a9c94db/coverage-7.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7d79dabc0a56f5af990cc6da9ad1e40766e82773c075f09cc571e2076fef882e", size = 244786, upload-time = "2025-08-29T15:33:08.965Z" }, + { url = "https://files.pythonhosted.org/packages/d0/4c/37ed872374a21813e0d3215256180c9a382c3f5ced6f2e5da0102fc2fd3e/coverage-7.10.6-cp310-cp310-win32.whl", hash = "sha256:86b9b59f2b16e981906e9d6383eb6446d5b46c278460ae2c36487667717eccf1", size = 219521, upload-time = "2025-08-29T15:33:10.599Z" }, + { url = "https://files.pythonhosted.org/packages/8e/36/9311352fdc551dec5b973b61f4e453227ce482985a9368305880af4f85dd/coverage-7.10.6-cp310-cp310-win_amd64.whl", hash = "sha256:e132b9152749bd33534e5bd8565c7576f135f157b4029b975e15ee184325f528", size = 220417, upload-time = "2025-08-29T15:33:11.907Z" }, + { url = "https://files.pythonhosted.org/packages/d4/16/2bea27e212c4980753d6d563a0803c150edeaaddb0771a50d2afc410a261/coverage-7.10.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c706db3cabb7ceef779de68270150665e710b46d56372455cd741184f3868d8f", size = 217129, upload-time = "2025-08-29T15:33:13.575Z" }, + { url = "https://files.pythonhosted.org/packages/2a/51/e7159e068831ab37e31aac0969d47b8c5ee25b7d307b51e310ec34869315/coverage-7.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e0c38dc289e0508ef68ec95834cb5d2e96fdbe792eaccaa1bccac3966bbadcc", size = 217532, upload-time = "2025-08-29T15:33:14.872Z" }, + { url = "https://files.pythonhosted.org/packages/e7/c0/246ccbea53d6099325d25cd208df94ea435cd55f0db38099dd721efc7a1f/coverage-7.10.6-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:752a3005a1ded28f2f3a6e8787e24f28d6abe176ca64677bcd8d53d6fe2ec08a", size = 247931, upload-time = "2025-08-29T15:33:16.142Z" }, + { url = "https://files.pythonhosted.org/packages/7d/fb/7435ef8ab9b2594a6e3f58505cc30e98ae8b33265d844007737946c59389/coverage-7.10.6-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:689920ecfd60f992cafca4f5477d55720466ad2c7fa29bb56ac8d44a1ac2b47a", size = 249864, upload-time = "2025-08-29T15:33:17.434Z" }, + { url = "https://files.pythonhosted.org/packages/51/f8/d9d64e8da7bcddb094d511154824038833c81e3a039020a9d6539bf303e9/coverage-7.10.6-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec98435796d2624d6905820a42f82149ee9fc4f2d45c2c5bc5a44481cc50db62", size = 251969, upload-time = "2025-08-29T15:33:18.822Z" }, + { url = "https://files.pythonhosted.org/packages/43/28/c43ba0ef19f446d6463c751315140d8f2a521e04c3e79e5c5fe211bfa430/coverage-7.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b37201ce4a458c7a758ecc4efa92fa8ed783c66e0fa3c42ae19fc454a0792153", size = 249659, upload-time = "2025-08-29T15:33:20.407Z" }, + { url = "https://files.pythonhosted.org/packages/79/3e/53635bd0b72beaacf265784508a0b386defc9ab7fad99ff95f79ce9db555/coverage-7.10.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:2904271c80898663c810a6b067920a61dd8d38341244a3605bd31ab55250dad5", size = 247714, upload-time = "2025-08-29T15:33:21.751Z" }, + { url = "https://files.pythonhosted.org/packages/4c/55/0964aa87126624e8c159e32b0bc4e84edef78c89a1a4b924d28dd8265625/coverage-7.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5aea98383463d6e1fa4e95416d8de66f2d0cb588774ee20ae1b28df826bcb619", size = 248351, upload-time = 
"2025-08-29T15:33:23.105Z" }, + { url = "https://files.pythonhosted.org/packages/eb/ab/6cfa9dc518c6c8e14a691c54e53a9433ba67336c760607e299bfcf520cb1/coverage-7.10.6-cp311-cp311-win32.whl", hash = "sha256:e3fb1fa01d3598002777dd259c0c2e6d9d5e10e7222976fc8e03992f972a2cba", size = 219562, upload-time = "2025-08-29T15:33:24.717Z" }, + { url = "https://files.pythonhosted.org/packages/5b/18/99b25346690cbc55922e7cfef06d755d4abee803ef335baff0014268eff4/coverage-7.10.6-cp311-cp311-win_amd64.whl", hash = "sha256:f35ed9d945bece26553d5b4c8630453169672bea0050a564456eb88bdffd927e", size = 220453, upload-time = "2025-08-29T15:33:26.482Z" }, + { url = "https://files.pythonhosted.org/packages/d8/ed/81d86648a07ccb124a5cf1f1a7788712b8d7216b593562683cd5c9b0d2c1/coverage-7.10.6-cp311-cp311-win_arm64.whl", hash = "sha256:99e1a305c7765631d74b98bf7dbf54eeea931f975e80f115437d23848ee8c27c", size = 219127, upload-time = "2025-08-29T15:33:27.777Z" }, + { url = "https://files.pythonhosted.org/packages/26/06/263f3305c97ad78aab066d116b52250dd316e74fcc20c197b61e07eb391a/coverage-7.10.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5b2dd6059938063a2c9fee1af729d4f2af28fd1a545e9b7652861f0d752ebcea", size = 217324, upload-time = "2025-08-29T15:33:29.06Z" }, + { url = "https://files.pythonhosted.org/packages/e9/60/1e1ded9a4fe80d843d7d53b3e395c1db3ff32d6c301e501f393b2e6c1c1f/coverage-7.10.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:388d80e56191bf846c485c14ae2bc8898aa3124d9d35903fef7d907780477634", size = 217560, upload-time = "2025-08-29T15:33:30.748Z" }, + { url = "https://files.pythonhosted.org/packages/b8/25/52136173c14e26dfed8b106ed725811bb53c30b896d04d28d74cb64318b3/coverage-7.10.6-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:90cb5b1a4670662719591aa92d0095bb41714970c0b065b02a2610172dbf0af6", size = 249053, upload-time = "2025-08-29T15:33:32.041Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1d/ae25a7dc58fcce8b172d42ffe5313fc267afe61c97fa872b80ee72d9515a/coverage-7.10.6-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:961834e2f2b863a0e14260a9a273aff07ff7818ab6e66d2addf5628590c628f9", size = 251802, upload-time = "2025-08-29T15:33:33.625Z" }, + { url = "https://files.pythonhosted.org/packages/f5/7a/1f561d47743710fe996957ed7c124b421320f150f1d38523d8d9102d3e2a/coverage-7.10.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf9a19f5012dab774628491659646335b1928cfc931bf8d97b0d5918dd58033c", size = 252935, upload-time = "2025-08-29T15:33:34.909Z" }, + { url = "https://files.pythonhosted.org/packages/6c/ad/8b97cd5d28aecdfde792dcbf646bac141167a5cacae2cd775998b45fabb5/coverage-7.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:99c4283e2a0e147b9c9cc6bc9c96124de9419d6044837e9799763a0e29a7321a", size = 250855, upload-time = "2025-08-29T15:33:36.922Z" }, + { url = "https://files.pythonhosted.org/packages/33/6a/95c32b558d9a61858ff9d79580d3877df3eb5bc9eed0941b1f187c89e143/coverage-7.10.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:282b1b20f45df57cc508c1e033403f02283adfb67d4c9c35a90281d81e5c52c5", size = 248974, upload-time = "2025-08-29T15:33:38.175Z" }, + { url = "https://files.pythonhosted.org/packages/0d/9c/8ce95dee640a38e760d5b747c10913e7a06554704d60b41e73fdea6a1ffd/coverage-7.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8cdbe264f11afd69841bd8c0d83ca10b5b32853263ee62e6ac6a0ab63895f972", size = 250409, upload-time = 
"2025-08-29T15:33:39.447Z" }, + { url = "https://files.pythonhosted.org/packages/04/12/7a55b0bdde78a98e2eb2356771fd2dcddb96579e8342bb52aa5bc52e96f0/coverage-7.10.6-cp312-cp312-win32.whl", hash = "sha256:a517feaf3a0a3eca1ee985d8373135cfdedfbba3882a5eab4362bda7c7cf518d", size = 219724, upload-time = "2025-08-29T15:33:41.172Z" }, + { url = "https://files.pythonhosted.org/packages/36/4a/32b185b8b8e327802c9efce3d3108d2fe2d9d31f153a0f7ecfd59c773705/coverage-7.10.6-cp312-cp312-win_amd64.whl", hash = "sha256:856986eadf41f52b214176d894a7de05331117f6035a28ac0016c0f63d887629", size = 220536, upload-time = "2025-08-29T15:33:42.524Z" }, + { url = "https://files.pythonhosted.org/packages/08/3a/d5d8dc703e4998038c3099eaf77adddb00536a3cec08c8dcd556a36a3eb4/coverage-7.10.6-cp312-cp312-win_arm64.whl", hash = "sha256:acf36b8268785aad739443fa2780c16260ee3fa09d12b3a70f772ef100939d80", size = 219171, upload-time = "2025-08-29T15:33:43.974Z" }, + { url = "https://files.pythonhosted.org/packages/bd/e7/917e5953ea29a28c1057729c1d5af9084ab6d9c66217523fd0e10f14d8f6/coverage-7.10.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ffea0575345e9ee0144dfe5701aa17f3ba546f8c3bb48db62ae101afb740e7d6", size = 217351, upload-time = "2025-08-29T15:33:45.438Z" }, + { url = "https://files.pythonhosted.org/packages/eb/86/2e161b93a4f11d0ea93f9bebb6a53f113d5d6e416d7561ca41bb0a29996b/coverage-7.10.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:95d91d7317cde40a1c249d6b7382750b7e6d86fad9d8eaf4fa3f8f44cf171e80", size = 217600, upload-time = "2025-08-29T15:33:47.269Z" }, + { url = "https://files.pythonhosted.org/packages/0e/66/d03348fdd8df262b3a7fb4ee5727e6e4936e39e2f3a842e803196946f200/coverage-7.10.6-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3e23dd5408fe71a356b41baa82892772a4cefcf758f2ca3383d2aa39e1b7a003", size = 248600, upload-time = "2025-08-29T15:33:48.953Z" }, + { url = "https://files.pythonhosted.org/packages/73/dd/508420fb47d09d904d962f123221bc249f64b5e56aa93d5f5f7603be475f/coverage-7.10.6-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0f3f56e4cb573755e96a16501a98bf211f100463d70275759e73f3cbc00d4f27", size = 251206, upload-time = "2025-08-29T15:33:50.697Z" }, + { url = "https://files.pythonhosted.org/packages/e9/1f/9020135734184f439da85c70ea78194c2730e56c2d18aee6e8ff1719d50d/coverage-7.10.6-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:db4a1d897bbbe7339946ffa2fe60c10cc81c43fab8b062d3fcb84188688174a4", size = 252478, upload-time = "2025-08-29T15:33:52.303Z" }, + { url = "https://files.pythonhosted.org/packages/a4/a4/3d228f3942bb5a2051fde28c136eea23a761177dc4ff4ef54533164ce255/coverage-7.10.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d8fd7879082953c156d5b13c74aa6cca37f6a6f4747b39538504c3f9c63d043d", size = 250637, upload-time = "2025-08-29T15:33:53.67Z" }, + { url = "https://files.pythonhosted.org/packages/36/e3/293dce8cdb9a83de971637afc59b7190faad60603b40e32635cbd15fbf61/coverage-7.10.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:28395ca3f71cd103b8c116333fa9db867f3a3e1ad6a084aa3725ae002b6583bc", size = 248529, upload-time = "2025-08-29T15:33:55.022Z" }, + { url = "https://files.pythonhosted.org/packages/90/26/64eecfa214e80dd1d101e420cab2901827de0e49631d666543d0e53cf597/coverage-7.10.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:61c950fc33d29c91b9e18540e1aed7d9f6787cc870a3e4032493bbbe641d12fc", size = 250143, upload-time = 
"2025-08-29T15:33:56.386Z" }, + { url = "https://files.pythonhosted.org/packages/3e/70/bd80588338f65ea5b0d97e424b820fb4068b9cfb9597fbd91963086e004b/coverage-7.10.6-cp313-cp313-win32.whl", hash = "sha256:160c00a5e6b6bdf4e5984b0ef21fc860bc94416c41b7df4d63f536d17c38902e", size = 219770, upload-time = "2025-08-29T15:33:58.063Z" }, + { url = "https://files.pythonhosted.org/packages/a7/14/0b831122305abcc1060c008f6c97bbdc0a913ab47d65070a01dc50293c2b/coverage-7.10.6-cp313-cp313-win_amd64.whl", hash = "sha256:628055297f3e2aa181464c3808402887643405573eb3d9de060d81531fa79d32", size = 220566, upload-time = "2025-08-29T15:33:59.766Z" }, + { url = "https://files.pythonhosted.org/packages/83/c6/81a83778c1f83f1a4a168ed6673eeedc205afb562d8500175292ca64b94e/coverage-7.10.6-cp313-cp313-win_arm64.whl", hash = "sha256:df4ec1f8540b0bcbe26ca7dd0f541847cc8a108b35596f9f91f59f0c060bfdd2", size = 219195, upload-time = "2025-08-29T15:34:01.191Z" }, + { url = "https://files.pythonhosted.org/packages/d7/1c/ccccf4bf116f9517275fa85047495515add43e41dfe8e0bef6e333c6b344/coverage-7.10.6-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:c9a8b7a34a4de3ed987f636f71881cd3b8339f61118b1aa311fbda12741bff0b", size = 218059, upload-time = "2025-08-29T15:34:02.91Z" }, + { url = "https://files.pythonhosted.org/packages/92/97/8a3ceff833d27c7492af4f39d5da6761e9ff624831db9e9f25b3886ddbca/coverage-7.10.6-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8dd5af36092430c2b075cee966719898f2ae87b636cefb85a653f1d0ba5d5393", size = 218287, upload-time = "2025-08-29T15:34:05.106Z" }, + { url = "https://files.pythonhosted.org/packages/92/d8/50b4a32580cf41ff0423777a2791aaf3269ab60c840b62009aec12d3970d/coverage-7.10.6-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b0353b0f0850d49ada66fdd7d0c7cdb0f86b900bb9e367024fd14a60cecc1e27", size = 259625, upload-time = "2025-08-29T15:34:06.575Z" }, + { url = "https://files.pythonhosted.org/packages/7e/7e/6a7df5a6fb440a0179d94a348eb6616ed4745e7df26bf2a02bc4db72c421/coverage-7.10.6-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d6b9ae13d5d3e8aeca9ca94198aa7b3ebbc5acfada557d724f2a1f03d2c0b0df", size = 261801, upload-time = "2025-08-29T15:34:08.006Z" }, + { url = "https://files.pythonhosted.org/packages/3a/4c/a270a414f4ed5d196b9d3d67922968e768cd971d1b251e1b4f75e9362f75/coverage-7.10.6-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:675824a363cc05781b1527b39dc2587b8984965834a748177ee3c37b64ffeafb", size = 264027, upload-time = "2025-08-29T15:34:09.806Z" }, + { url = "https://files.pythonhosted.org/packages/9c/8b/3210d663d594926c12f373c5370bf1e7c5c3a427519a8afa65b561b9a55c/coverage-7.10.6-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:692d70ea725f471a547c305f0d0fc6a73480c62fb0da726370c088ab21aed282", size = 261576, upload-time = "2025-08-29T15:34:11.585Z" }, + { url = "https://files.pythonhosted.org/packages/72/d0/e1961eff67e9e1dba3fc5eb7a4caf726b35a5b03776892da8d79ec895775/coverage-7.10.6-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:851430a9a361c7a8484a36126d1d0ff8d529d97385eacc8dfdc9bfc8c2d2cbe4", size = 259341, upload-time = "2025-08-29T15:34:13.159Z" }, + { url = "https://files.pythonhosted.org/packages/3a/06/d6478d152cd189b33eac691cba27a40704990ba95de49771285f34a5861e/coverage-7.10.6-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d9369a23186d189b2fc95cc08b8160ba242057e887d766864f7adf3c46b2df21", size = 260468, upload-time = 
"2025-08-29T15:34:14.571Z" }, + { url = "https://files.pythonhosted.org/packages/ed/73/737440247c914a332f0b47f7598535b29965bf305e19bbc22d4c39615d2b/coverage-7.10.6-cp313-cp313t-win32.whl", hash = "sha256:92be86fcb125e9bda0da7806afd29a3fd33fdf58fba5d60318399adf40bf37d0", size = 220429, upload-time = "2025-08-29T15:34:16.394Z" }, + { url = "https://files.pythonhosted.org/packages/bd/76/b92d3214740f2357ef4a27c75a526eb6c28f79c402e9f20a922c295c05e2/coverage-7.10.6-cp313-cp313t-win_amd64.whl", hash = "sha256:6b3039e2ca459a70c79523d39347d83b73f2f06af5624905eba7ec34d64d80b5", size = 221493, upload-time = "2025-08-29T15:34:17.835Z" }, + { url = "https://files.pythonhosted.org/packages/fc/8e/6dcb29c599c8a1f654ec6cb68d76644fe635513af16e932d2d4ad1e5ac6e/coverage-7.10.6-cp313-cp313t-win_arm64.whl", hash = "sha256:3fb99d0786fe17b228eab663d16bee2288e8724d26a199c29325aac4b0319b9b", size = 219757, upload-time = "2025-08-29T15:34:19.248Z" }, + { url = "https://files.pythonhosted.org/packages/d3/aa/76cf0b5ec00619ef208da4689281d48b57f2c7fde883d14bf9441b74d59f/coverage-7.10.6-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6008a021907be8c4c02f37cdc3ffb258493bdebfeaf9a839f9e71dfdc47b018e", size = 217331, upload-time = "2025-08-29T15:34:20.846Z" }, + { url = "https://files.pythonhosted.org/packages/65/91/8e41b8c7c505d398d7730206f3cbb4a875a35ca1041efc518051bfce0f6b/coverage-7.10.6-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:5e75e37f23eb144e78940b40395b42f2321951206a4f50e23cfd6e8a198d3ceb", size = 217607, upload-time = "2025-08-29T15:34:22.433Z" }, + { url = "https://files.pythonhosted.org/packages/87/7f/f718e732a423d442e6616580a951b8d1ec3575ea48bcd0e2228386805e79/coverage-7.10.6-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0f7cb359a448e043c576f0da00aa8bfd796a01b06aa610ca453d4dde09cc1034", size = 248663, upload-time = "2025-08-29T15:34:24.425Z" }, + { url = "https://files.pythonhosted.org/packages/e6/52/c1106120e6d801ac03e12b5285e971e758e925b6f82ee9b86db3aa10045d/coverage-7.10.6-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c68018e4fc4e14b5668f1353b41ccf4bc83ba355f0e1b3836861c6f042d89ac1", size = 251197, upload-time = "2025-08-29T15:34:25.906Z" }, + { url = "https://files.pythonhosted.org/packages/3d/ec/3a8645b1bb40e36acde9c0609f08942852a4af91a937fe2c129a38f2d3f5/coverage-7.10.6-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cd4b2b0707fc55afa160cd5fc33b27ccbf75ca11d81f4ec9863d5793fc6df56a", size = 252551, upload-time = "2025-08-29T15:34:27.337Z" }, + { url = "https://files.pythonhosted.org/packages/a1/70/09ecb68eeb1155b28a1d16525fd3a9b65fbe75337311a99830df935d62b6/coverage-7.10.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4cec13817a651f8804a86e4f79d815b3b28472c910e099e4d5a0e8a3b6a1d4cb", size = 250553, upload-time = "2025-08-29T15:34:29.065Z" }, + { url = "https://files.pythonhosted.org/packages/c6/80/47df374b893fa812e953b5bc93dcb1427a7b3d7a1a7d2db33043d17f74b9/coverage-7.10.6-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:f2a6a8e06bbda06f78739f40bfb56c45d14eb8249d0f0ea6d4b3d48e1f7c695d", size = 248486, upload-time = "2025-08-29T15:34:30.897Z" }, + { url = "https://files.pythonhosted.org/packages/4a/65/9f98640979ecee1b0d1a7164b589de720ddf8100d1747d9bbdb84be0c0fb/coverage-7.10.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:081b98395ced0d9bcf60ada7661a0b75f36b78b9d7e39ea0790bb4ed8da14747", size = 249981, upload-time = 
"2025-08-29T15:34:32.365Z" }, + { url = "https://files.pythonhosted.org/packages/1f/55/eeb6603371e6629037f47bd25bef300387257ed53a3c5fdb159b7ac8c651/coverage-7.10.6-cp314-cp314-win32.whl", hash = "sha256:6937347c5d7d069ee776b2bf4e1212f912a9f1f141a429c475e6089462fcecc5", size = 220054, upload-time = "2025-08-29T15:34:34.124Z" }, + { url = "https://files.pythonhosted.org/packages/15/d1/a0912b7611bc35412e919a2cd59ae98e7ea3b475e562668040a43fb27897/coverage-7.10.6-cp314-cp314-win_amd64.whl", hash = "sha256:adec1d980fa07e60b6ef865f9e5410ba760e4e1d26f60f7e5772c73b9a5b0713", size = 220851, upload-time = "2025-08-29T15:34:35.651Z" }, + { url = "https://files.pythonhosted.org/packages/ef/2d/11880bb8ef80a45338e0b3e0725e4c2d73ffbb4822c29d987078224fd6a5/coverage-7.10.6-cp314-cp314-win_arm64.whl", hash = "sha256:a80f7aef9535442bdcf562e5a0d5a5538ce8abe6bb209cfbf170c462ac2c2a32", size = 219429, upload-time = "2025-08-29T15:34:37.16Z" }, + { url = "https://files.pythonhosted.org/packages/83/c0/1f00caad775c03a700146f55536ecd097a881ff08d310a58b353a1421be0/coverage-7.10.6-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:0de434f4fbbe5af4fa7989521c655c8c779afb61c53ab561b64dcee6149e4c65", size = 218080, upload-time = "2025-08-29T15:34:38.919Z" }, + { url = "https://files.pythonhosted.org/packages/a9/c4/b1c5d2bd7cc412cbeb035e257fd06ed4e3e139ac871d16a07434e145d18d/coverage-7.10.6-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6e31b8155150c57e5ac43ccd289d079eb3f825187d7c66e755a055d2c85794c6", size = 218293, upload-time = "2025-08-29T15:34:40.425Z" }, + { url = "https://files.pythonhosted.org/packages/3f/07/4468d37c94724bf6ec354e4ec2f205fda194343e3e85fd2e59cec57e6a54/coverage-7.10.6-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:98cede73eb83c31e2118ae8d379c12e3e42736903a8afcca92a7218e1f2903b0", size = 259800, upload-time = "2025-08-29T15:34:41.996Z" }, + { url = "https://files.pythonhosted.org/packages/82/d8/f8fb351be5fee31690cd8da768fd62f1cfab33c31d9f7baba6cd8960f6b8/coverage-7.10.6-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f863c08f4ff6b64fa8045b1e3da480f5374779ef187f07b82e0538c68cb4ff8e", size = 261965, upload-time = "2025-08-29T15:34:43.61Z" }, + { url = "https://files.pythonhosted.org/packages/e8/70/65d4d7cfc75c5c6eb2fed3ee5cdf420fd8ae09c4808723a89a81d5b1b9c3/coverage-7.10.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2b38261034fda87be356f2c3f42221fdb4171c3ce7658066ae449241485390d5", size = 264220, upload-time = "2025-08-29T15:34:45.387Z" }, + { url = "https://files.pythonhosted.org/packages/98/3c/069df106d19024324cde10e4ec379fe2fb978017d25e97ebee23002fbadf/coverage-7.10.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0e93b1476b79eae849dc3872faeb0bf7948fd9ea34869590bc16a2a00b9c82a7", size = 261660, upload-time = "2025-08-29T15:34:47.288Z" }, + { url = "https://files.pythonhosted.org/packages/fc/8a/2974d53904080c5dc91af798b3a54a4ccb99a45595cc0dcec6eb9616a57d/coverage-7.10.6-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ff8a991f70f4c0cf53088abf1e3886edcc87d53004c7bb94e78650b4d3dac3b5", size = 259417, upload-time = "2025-08-29T15:34:48.779Z" }, + { url = "https://files.pythonhosted.org/packages/30/38/9616a6b49c686394b318974d7f6e08f38b8af2270ce7488e879888d1e5db/coverage-7.10.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ac765b026c9f33044419cbba1da913cfb82cca1b60598ac1c7a5ed6aac4621a0", size = 260567, upload-time = 
"2025-08-29T15:34:50.718Z" }, + { url = "https://files.pythonhosted.org/packages/76/16/3ed2d6312b371a8cf804abf4e14895b70e4c3491c6e53536d63fd0958a8d/coverage-7.10.6-cp314-cp314t-win32.whl", hash = "sha256:441c357d55f4936875636ef2cfb3bee36e466dcf50df9afbd398ce79dba1ebb7", size = 220831, upload-time = "2025-08-29T15:34:52.653Z" }, + { url = "https://files.pythonhosted.org/packages/d5/e5/d38d0cb830abede2adb8b147770d2a3d0e7fecc7228245b9b1ae6c24930a/coverage-7.10.6-cp314-cp314t-win_amd64.whl", hash = "sha256:073711de3181b2e204e4870ac83a7c4853115b42e9cd4d145f2231e12d670930", size = 221950, upload-time = "2025-08-29T15:34:54.212Z" }, + { url = "https://files.pythonhosted.org/packages/f4/51/e48e550f6279349895b0ffcd6d2a690e3131ba3a7f4eafccc141966d4dea/coverage-7.10.6-cp314-cp314t-win_arm64.whl", hash = "sha256:137921f2bac5559334ba66122b753db6dc5d1cf01eb7b64eb412bb0d064ef35b", size = 219969, upload-time = "2025-08-29T15:34:55.83Z" }, + { url = "https://files.pythonhosted.org/packages/91/70/f73ad83b1d2fd2d5825ac58c8f551193433a7deaf9b0d00a8b69ef61cd9a/coverage-7.10.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:90558c35af64971d65fbd935c32010f9a2f52776103a259f1dee865fe8259352", size = 217009, upload-time = "2025-08-29T15:34:57.381Z" }, + { url = "https://files.pythonhosted.org/packages/01/e8/099b55cd48922abbd4b01ddd9ffa352408614413ebfc965501e981aced6b/coverage-7.10.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8953746d371e5695405806c46d705a3cd170b9cc2b9f93953ad838f6c1e58612", size = 217400, upload-time = "2025-08-29T15:34:58.985Z" }, + { url = "https://files.pythonhosted.org/packages/ee/d1/c6bac7c9e1003110a318636fef3b5c039df57ab44abcc41d43262a163c28/coverage-7.10.6-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c83f6afb480eae0313114297d29d7c295670a41c11b274e6bca0c64540c1ce7b", size = 243835, upload-time = "2025-08-29T15:35:00.541Z" }, + { url = "https://files.pythonhosted.org/packages/01/f9/82c6c061838afbd2172e773156c0aa84a901d59211b4975a4e93accf5c89/coverage-7.10.6-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7eb68d356ba0cc158ca535ce1381dbf2037fa8cb5b1ae5ddfc302e7317d04144", size = 245658, upload-time = "2025-08-29T15:35:02.135Z" }, + { url = "https://files.pythonhosted.org/packages/81/6a/35674445b1d38161148558a3ff51b0aa7f0b54b1def3abe3fbd34efe05bc/coverage-7.10.6-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5b15a87265e96307482746d86995f4bff282f14b027db75469c446da6127433b", size = 247433, upload-time = "2025-08-29T15:35:03.777Z" }, + { url = "https://files.pythonhosted.org/packages/18/27/98c99e7cafb288730a93535092eb433b5503d529869791681c4f2e2012a8/coverage-7.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fc53ba868875bfbb66ee447d64d6413c2db91fddcfca57025a0e7ab5b07d5862", size = 245315, upload-time = "2025-08-29T15:35:05.629Z" }, + { url = "https://files.pythonhosted.org/packages/09/05/123e0dba812408c719c319dea05782433246f7aa7b67e60402d90e847545/coverage-7.10.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:efeda443000aa23f276f4df973cb82beca682fd800bb119d19e80504ffe53ec2", size = 243385, upload-time = "2025-08-29T15:35:07.494Z" }, + { url = "https://files.pythonhosted.org/packages/67/52/d57a42502aef05c6325f28e2e81216c2d9b489040132c18725b7a04d1448/coverage-7.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9702b59d582ff1e184945d8b501ffdd08d2cee38d93a2206aa5f1365ce0b8d78", size = 244343, upload-time = "2025-08-29T15:35:09.55Z" }, + { url 
= "https://files.pythonhosted.org/packages/6b/22/7f6fad7dbb37cf99b542c5e157d463bd96b797078b1ec506691bc836f476/coverage-7.10.6-cp39-cp39-win32.whl", hash = "sha256:2195f8e16ba1a44651ca684db2ea2b2d4b5345da12f07d9c22a395202a05b23c", size = 219530, upload-time = "2025-08-29T15:35:11.167Z" }, + { url = "https://files.pythonhosted.org/packages/62/30/e2fda29bfe335026027e11e6a5e57a764c9df13127b5cf42af4c3e99b937/coverage-7.10.6-cp39-cp39-win_amd64.whl", hash = "sha256:f32ff80e7ef6a5b5b606ea69a36e97b219cd9dc799bcf2963018a4d8f788cfbf", size = 220432, upload-time = "2025-08-29T15:35:12.902Z" }, + { url = "https://files.pythonhosted.org/packages/44/0c/50db5379b615854b5cf89146f8f5bd1d5a9693d7f3a987e269693521c404/coverage-7.10.6-py3-none-any.whl", hash = "sha256:92c4ecf6bf11b2e85fd4d8204814dc26e6a19f0c9d938c207c5cb0eadfcabbe3", size = 208986, upload-time = "2025-08-29T15:35:14.506Z" }, ] [package.optional-dependencies] @@ -1105,7 +1150,8 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pywin32", marker = "sys_platform == 'win32'" }, { name = "requests" }, - { name = "urllib3" }, + { name = "urllib3", version = "1.26.20", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "urllib3", version = "2.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834, upload-time = "2024-05-23T11:13:57.216Z" } wheels = [ @@ -1493,6 +1539,11 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2f/e0/014d5d9d7a4564cf1c40b5039bc882db69fd881111e03ab3657ac0b218e2/fsspec-2025.7.0-py3-none-any.whl", hash = "sha256:8b012e39f63c7d5f10474de957f3ab793b47b45ae7d39f2fb735f8bbe25c0e21", size = 199597, upload-time = "2025-07-15T16:05:19.529Z" }, ] +[package.optional-dependencies] +s3 = [ + { name = "s3fs" }, +] + [[package]] name = "google-api-core" version = "2.25.1" @@ -1969,6 +2020,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, ] +[[package]] +name = "jmespath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843, upload-time = "2022-06-17T18:00:12.224Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = "2022-06-17T18:00:10.251Z" }, +] + [[package]] name = "litestar" version = "2.17.0" @@ -2136,7 +2196,8 @@ dependencies = [ { name = "certifi" }, { name = "pycryptodome" }, { name = "typing-extensions" }, - { name = "urllib3" }, + { name = "urllib3", version = "1.26.20", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "urllib3", version = "2.5.0", source = { registry = 
"https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/f4/a0/33ea2e18d5169817950edc13eba58cd781cedefe9f6696cae26aa2d75882/minio-7.2.16.tar.gz", hash = "sha256:81e365c8494d591d8204a63ee7596bfdf8a7d06ad1b1507d6b9c1664a95f299a", size = 139149, upload-time = "2025-07-21T20:11:15.911Z" } wheels = [ @@ -4145,14 +4206,14 @@ wheels = [ [[package]] name = "questionary" -version = "2.1.0" +version = "2.1.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "prompt-toolkit" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a8/b8/d16eb579277f3de9e56e5ad25280fab52fc5774117fb70362e8c2e016559/questionary-2.1.0.tar.gz", hash = "sha256:6302cdd645b19667d8f6e6634774e9538bfcd1aad9be287e743d96cacaf95587", size = 26775, upload-time = "2024-12-29T11:49:17.802Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/45/eafb0bba0f9988f6a2520f9ca2df2c82ddfa8d67c95d6625452e97b204a5/questionary-2.1.1.tar.gz", hash = "sha256:3d7e980292bb0107abaa79c68dd3eee3c561b83a0f89ae482860b181c8bd412d", size = 25845, upload-time = "2025-08-28T19:00:20.851Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ad/3f/11dd4cd4f39e05128bfd20138faea57bec56f9ffba6185d276e3107ba5b2/questionary-2.1.0-py3-none-any.whl", hash = "sha256:44174d237b68bc828e4878c763a9ad6790ee61990e0ae72927694ead57bab8ec", size = 36747, upload-time = "2024-12-29T11:49:16.734Z" }, + { url = "https://files.pythonhosted.org/packages/3c/26/1062c7ec1b053db9e499b4d2d5bc231743201b74051c973dadeac80a8f43/questionary-2.1.1-py3-none-any.whl", hash = "sha256:a51af13f345f1cdea62347589fbb6df3b290306ab8930713bfae4d475a7d4a59", size = 36753, upload-time = "2025-08-28T19:00:19.56Z" }, ] [[package]] @@ -4163,7 +4224,8 @@ dependencies = [ { name = "certifi" }, { name = "charset-normalizer" }, { name = "idna" }, - { name = "urllib3" }, + { name = "urllib3", version = "1.26.20", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "urllib3", version = "2.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } wheels = [ @@ -4286,28 +4348,42 @@ wheels = [ [[package]] name = "ruff" -version = "0.12.10" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3b/eb/8c073deb376e46ae767f4961390d17545e8535921d2f65101720ed8bd434/ruff-0.12.10.tar.gz", hash = "sha256:189ab65149d11ea69a2d775343adf5f49bb2426fc4780f65ee33b423ad2e47f9", size = 5310076, upload-time = "2025-08-21T18:23:22.595Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/24/e7/560d049d15585d6c201f9eeacd2fd130def3741323e5ccf123786e0e3c95/ruff-0.12.10-py3-none-linux_armv6l.whl", hash = "sha256:8b593cb0fb55cc8692dac7b06deb29afda78c721c7ccfed22db941201b7b8f7b", size = 11935161, upload-time = "2025-08-21T18:22:26.965Z" }, - { url = "https://files.pythonhosted.org/packages/d1/b0/ad2464922a1113c365d12b8f80ed70fcfb39764288ac77c995156080488d/ruff-0.12.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ebb7333a45d56efc7c110a46a69a1b32365d5c5161e7244aaf3aa20ce62399c1", size = 12660884, upload-time = "2025-08-21T18:22:30.925Z" }, - { url = 
"https://files.pythonhosted.org/packages/d7/f1/97f509b4108d7bae16c48389f54f005b62ce86712120fd8b2d8e88a7cb49/ruff-0.12.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d59e58586829f8e4a9920788f6efba97a13d1fa320b047814e8afede381c6839", size = 11872754, upload-time = "2025-08-21T18:22:34.035Z" }, - { url = "https://files.pythonhosted.org/packages/12/ad/44f606d243f744a75adc432275217296095101f83f966842063d78eee2d3/ruff-0.12.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:822d9677b560f1fdeab69b89d1f444bf5459da4aa04e06e766cf0121771ab844", size = 12092276, upload-time = "2025-08-21T18:22:36.764Z" }, - { url = "https://files.pythonhosted.org/packages/06/1f/ed6c265e199568010197909b25c896d66e4ef2c5e1c3808caf461f6f3579/ruff-0.12.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:37b4a64f4062a50c75019c61c7017ff598cb444984b638511f48539d3a1c98db", size = 11734700, upload-time = "2025-08-21T18:22:39.822Z" }, - { url = "https://files.pythonhosted.org/packages/63/c5/b21cde720f54a1d1db71538c0bc9b73dee4b563a7dd7d2e404914904d7f5/ruff-0.12.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c6f4064c69d2542029b2a61d39920c85240c39837599d7f2e32e80d36401d6e", size = 13468783, upload-time = "2025-08-21T18:22:42.559Z" }, - { url = "https://files.pythonhosted.org/packages/02/9e/39369e6ac7f2a1848f22fb0b00b690492f20811a1ac5c1fd1d2798329263/ruff-0.12.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:059e863ea3a9ade41407ad71c1de2badfbe01539117f38f763ba42a1206f7559", size = 14436642, upload-time = "2025-08-21T18:22:45.612Z" }, - { url = "https://files.pythonhosted.org/packages/e3/03/5da8cad4b0d5242a936eb203b58318016db44f5c5d351b07e3f5e211bb89/ruff-0.12.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1bef6161e297c68908b7218fa6e0e93e99a286e5ed9653d4be71e687dff101cf", size = 13859107, upload-time = "2025-08-21T18:22:48.886Z" }, - { url = "https://files.pythonhosted.org/packages/19/19/dd7273b69bf7f93a070c9cec9494a94048325ad18fdcf50114f07e6bf417/ruff-0.12.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4f1345fbf8fb0531cd722285b5f15af49b2932742fc96b633e883da8d841896b", size = 12886521, upload-time = "2025-08-21T18:22:51.567Z" }, - { url = "https://files.pythonhosted.org/packages/c0/1d/b4207ec35e7babaee62c462769e77457e26eb853fbdc877af29417033333/ruff-0.12.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f68433c4fbc63efbfa3ba5db31727db229fa4e61000f452c540474b03de52a9", size = 13097528, upload-time = "2025-08-21T18:22:54.609Z" }, - { url = "https://files.pythonhosted.org/packages/ff/00/58f7b873b21114456e880b75176af3490d7a2836033779ca42f50de3b47a/ruff-0.12.10-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:141ce3d88803c625257b8a6debf4a0473eb6eed9643a6189b68838b43e78165a", size = 13080443, upload-time = "2025-08-21T18:22:57.413Z" }, - { url = "https://files.pythonhosted.org/packages/12/8c/9e6660007fb10189ccb78a02b41691288038e51e4788bf49b0a60f740604/ruff-0.12.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f3fc21178cd44c98142ae7590f42ddcb587b8e09a3b849cbc84edb62ee95de60", size = 11896759, upload-time = "2025-08-21T18:23:00.473Z" }, - { url = "https://files.pythonhosted.org/packages/67/4c/6d092bb99ea9ea6ebda817a0e7ad886f42a58b4501a7e27cd97371d0ba54/ruff-0.12.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:7d1a4e0bdfafcd2e3e235ecf50bf0176f74dd37902f241588ae1f6c827a36c56", size = 11701463, upload-time = "2025-08-21T18:23:03.211Z" }, - { 
url = "https://files.pythonhosted.org/packages/59/80/d982c55e91df981f3ab62559371380616c57ffd0172d96850280c2b04fa8/ruff-0.12.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:e67d96827854f50b9e3e8327b031647e7bcc090dbe7bb11101a81a3a2cbf1cc9", size = 12691603, upload-time = "2025-08-21T18:23:06.935Z" }, - { url = "https://files.pythonhosted.org/packages/ad/37/63a9c788bbe0b0850611669ec6b8589838faf2f4f959647f2d3e320383ae/ruff-0.12.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ae479e1a18b439c59138f066ae79cc0f3ee250712a873d00dbafadaad9481e5b", size = 13164356, upload-time = "2025-08-21T18:23:10.225Z" }, - { url = "https://files.pythonhosted.org/packages/47/d4/1aaa7fb201a74181989970ebccd12f88c0fc074777027e2a21de5a90657e/ruff-0.12.10-py3-none-win32.whl", hash = "sha256:9de785e95dc2f09846c5e6e1d3a3d32ecd0b283a979898ad427a9be7be22b266", size = 11896089, upload-time = "2025-08-21T18:23:14.232Z" }, - { url = "https://files.pythonhosted.org/packages/ad/14/2ad38fd4037daab9e023456a4a40ed0154e9971f8d6aed41bdea390aabd9/ruff-0.12.10-py3-none-win_amd64.whl", hash = "sha256:7837eca8787f076f67aba2ca559cefd9c5cbc3a9852fd66186f4201b87c1563e", size = 13004616, upload-time = "2025-08-21T18:23:17.422Z" }, - { url = "https://files.pythonhosted.org/packages/24/3c/21cf283d67af33a8e6ed242396863af195a8a6134ec581524fd22b9811b6/ruff-0.12.10-py3-none-win_arm64.whl", hash = "sha256:cc138cc06ed9d4bfa9d667a65af7172b47840e1a98b02ce7011c391e54635ffc", size = 12074225, upload-time = "2025-08-21T18:23:20.137Z" }, +version = "0.12.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/de/55/16ab6a7d88d93001e1ae4c34cbdcfb376652d761799459ff27c1dc20f6fa/ruff-0.12.11.tar.gz", hash = "sha256:c6b09ae8426a65bbee5425b9d0b82796dbb07cb1af045743c79bfb163001165d", size = 5347103, upload-time = "2025-08-28T13:59:08.87Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d6/a2/3b3573e474de39a7a475f3fbaf36a25600bfeb238e1a90392799163b64a0/ruff-0.12.11-py3-none-linux_armv6l.whl", hash = "sha256:93fce71e1cac3a8bf9200e63a38ac5c078f3b6baebffb74ba5274fb2ab276065", size = 11979885, upload-time = "2025-08-28T13:58:26.654Z" }, + { url = "https://files.pythonhosted.org/packages/76/e4/235ad6d1785a2012d3ded2350fd9bc5c5af8c6f56820e696b0118dfe7d24/ruff-0.12.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b8e33ac7b28c772440afa80cebb972ffd823621ded90404f29e5ab6d1e2d4b93", size = 12742364, upload-time = "2025-08-28T13:58:30.256Z" }, + { url = "https://files.pythonhosted.org/packages/2c/0d/15b72c5fe6b1e402a543aa9d8960e0a7e19dfb079f5b0b424db48b7febab/ruff-0.12.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d69fb9d4937aa19adb2e9f058bc4fbfe986c2040acb1a4a9747734834eaa0bfd", size = 11920111, upload-time = "2025-08-28T13:58:33.677Z" }, + { url = "https://files.pythonhosted.org/packages/3e/c0/f66339d7893798ad3e17fa5a1e587d6fd9806f7c1c062b63f8b09dda6702/ruff-0.12.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:411954eca8464595077a93e580e2918d0a01a19317af0a72132283e28ae21bee", size = 12160060, upload-time = "2025-08-28T13:58:35.74Z" }, + { url = "https://files.pythonhosted.org/packages/03/69/9870368326db26f20c946205fb2d0008988aea552dbaec35fbacbb46efaa/ruff-0.12.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6a2c0a2e1a450f387bf2c6237c727dd22191ae8c00e448e0672d624b2bbd7fb0", size = 11799848, upload-time = "2025-08-28T13:58:38.051Z" }, + { url = 
"https://files.pythonhosted.org/packages/25/8c/dd2c7f990e9b3a8a55eee09d4e675027d31727ce33cdb29eab32d025bdc9/ruff-0.12.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ca4c3a7f937725fd2413c0e884b5248a19369ab9bdd850b5781348ba283f644", size = 13536288, upload-time = "2025-08-28T13:58:40.046Z" }, + { url = "https://files.pythonhosted.org/packages/7a/30/d5496fa09aba59b5e01ea76775a4c8897b13055884f56f1c35a4194c2297/ruff-0.12.11-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4d1df0098124006f6a66ecf3581a7f7e754c4df7644b2e6704cd7ca80ff95211", size = 14490633, upload-time = "2025-08-28T13:58:42.285Z" }, + { url = "https://files.pythonhosted.org/packages/9b/2f/81f998180ad53445d403c386549d6946d0748e536d58fce5b5e173511183/ruff-0.12.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a8dd5f230efc99a24ace3b77e3555d3fbc0343aeed3fc84c8d89e75ab2ff793", size = 13888430, upload-time = "2025-08-28T13:58:44.641Z" }, + { url = "https://files.pythonhosted.org/packages/87/71/23a0d1d5892a377478c61dbbcffe82a3476b050f38b5162171942a029ef3/ruff-0.12.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4dc75533039d0ed04cd33fb8ca9ac9620b99672fe7ff1533b6402206901c34ee", size = 12913133, upload-time = "2025-08-28T13:58:47.039Z" }, + { url = "https://files.pythonhosted.org/packages/80/22/3c6cef96627f89b344c933781ed38329bfb87737aa438f15da95907cbfd5/ruff-0.12.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fc58f9266d62c6eccc75261a665f26b4ef64840887fc6cbc552ce5b29f96cc8", size = 13169082, upload-time = "2025-08-28T13:58:49.157Z" }, + { url = "https://files.pythonhosted.org/packages/05/b5/68b3ff96160d8b49e8dd10785ff3186be18fd650d356036a3770386e6c7f/ruff-0.12.11-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:5a0113bd6eafd545146440225fe60b4e9489f59eb5f5f107acd715ba5f0b3d2f", size = 13139490, upload-time = "2025-08-28T13:58:51.593Z" }, + { url = "https://files.pythonhosted.org/packages/59/b9/050a3278ecd558f74f7ee016fbdf10591d50119df8d5f5da45a22c6afafc/ruff-0.12.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0d737b4059d66295c3ea5720e6efc152623bb83fde5444209b69cd33a53e2000", size = 11958928, upload-time = "2025-08-28T13:58:53.943Z" }, + { url = "https://files.pythonhosted.org/packages/f9/bc/93be37347db854806904a43b0493af8d6873472dfb4b4b8cbb27786eb651/ruff-0.12.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:916fc5defee32dbc1fc1650b576a8fed68f5e8256e2180d4d9855aea43d6aab2", size = 11764513, upload-time = "2025-08-28T13:58:55.976Z" }, + { url = "https://files.pythonhosted.org/packages/7a/a1/1471751e2015a81fd8e166cd311456c11df74c7e8769d4aabfbc7584c7ac/ruff-0.12.11-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c984f07d7adb42d3ded5be894fb4007f30f82c87559438b4879fe7aa08c62b39", size = 12745154, upload-time = "2025-08-28T13:58:58.16Z" }, + { url = "https://files.pythonhosted.org/packages/68/ab/2542b14890d0f4872dd81b7b2a6aed3ac1786fae1ce9b17e11e6df9e31e3/ruff-0.12.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e07fbb89f2e9249f219d88331c833860489b49cdf4b032b8e4432e9b13e8a4b9", size = 13227653, upload-time = "2025-08-28T13:59:00.276Z" }, + { url = "https://files.pythonhosted.org/packages/22/16/2fbfc61047dbfd009c58a28369a693a1484ad15441723be1cd7fe69bb679/ruff-0.12.11-py3-none-win32.whl", hash = "sha256:c792e8f597c9c756e9bcd4d87cf407a00b60af77078c96f7b6366ea2ce9ba9d3", size = 11944270, upload-time = "2025-08-28T13:59:02.347Z" }, + { url = 
"https://files.pythonhosted.org/packages/08/a5/34276984705bfe069cd383101c45077ee029c3fe3b28225bf67aa35f0647/ruff-0.12.11-py3-none-win_amd64.whl", hash = "sha256:a3283325960307915b6deb3576b96919ee89432ebd9c48771ca12ee8afe4a0fd", size = 13046600, upload-time = "2025-08-28T13:59:04.751Z" }, + { url = "https://files.pythonhosted.org/packages/84/a8/001d4a7c2b37623a3fd7463208267fb906df40ff31db496157549cfd6e72/ruff-0.12.11-py3-none-win_arm64.whl", hash = "sha256:bae4d6e6a2676f8fb0f98b74594a048bae1b944aab17e9f5d504062303c6dbea", size = 12135290, upload-time = "2025-08-28T13:59:06.933Z" }, +] + +[[package]] +name = "s3fs" +version = "2025.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiobotocore" }, + { name = "aiohttp" }, + { name = "fsspec" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bf/13/37438c4672ba1d23ec46df0e4b57e98469e5c5f4f98313cf6842b631652b/s3fs-2025.7.0.tar.gz", hash = "sha256:5e7f9ec0cad7745155e3eb86fae15b1481fa29946bf5b3a4ce3a60701ce6022d", size = 77795, upload-time = "2025-07-15T16:35:22.177Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/c7/30d13b7fd4f866ca3f30e9a6e7ae038f0c45226f6e26b3cc98d6d197f93b/s3fs-2025.7.0-py3-none-any.whl", hash = "sha256:b6b2d3f84b6aa1c2ba5e62e39dd9410cf54f10a2cce1ea6db1ba0d1a6bcce685", size = 30315, upload-time = "2025-07-15T16:35:20.734Z" }, ] [[package]] @@ -4679,7 +4755,7 @@ dependencies = [ { name = "pygments", marker = "python_full_version >= '3.10'" }, { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, - { name = "urllib3", marker = "python_full_version >= '3.10'" }, + { name = "urllib3", version = "2.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/34/fe/ac4e24f35b5148b31ac717ae7dcc7a2f7ec56eb729e22c7252ed8ad2d9a5/sphinx_prompt-1.9.0.tar.gz", hash = "sha256:471b3c6d466dce780a9b167d9541865fd4e9a80ed46e31b06a52a0529ae995a1", size = 5340, upload-time = "2024-08-07T15:46:51.428Z" } wheels = [ @@ -4881,11 +4957,11 @@ asyncio = [ [[package]] name = "sqlglot" -version = "27.8.0" +version = "27.10.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ce/d6/dbe5a442ba5f0badf5d82f97fd4b83a7045bde563430d1bbfb90e7da5b71/sqlglot-27.8.0.tar.gz", hash = "sha256:026ca21be0106d23f67519d583a24131d27131ceb80b595efa2a59a2746f351f", size = 5418660, upload-time = "2025-08-19T11:54:29.292Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/63/49797c32bac570ed3b06c5b6b7a8361d432cd43340ccadcce8162be21bb9/sqlglot-27.10.0.tar.gz", hash = "sha256:77af99ca743b44e9469a6ded954fe9111ff5449166cc09df452b6fd0ed01fb6d", size = 5437988, upload-time = "2025-08-28T21:08:16.115Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0b/29/ffa987296beffe2ae7fc83c6fd9a62166d0abc4d2d16600605a5864c7d7f/sqlglot-27.8.0-py3-none-any.whl", hash = "sha256:3961277277bc5bae459762294e160b6b7ce998e7d016f5adf8311a1d50b7a1a7", size = 501092, upload-time = "2025-08-19T11:54:27.17Z" }, + { url = "https://files.pythonhosted.org/packages/67/8c/a7547d8cb904d9ab0478912ed2ce2444d0fe955403db2d54cf5de076f1c9/sqlglot-27.10.0-py3-none-any.whl", hash = "sha256:3981cf61d57cbab6da254283b272387c2dc1f239ec8fe8d637b3b67f3bf5cead", size = 
506563, upload-time = "2025-08-28T21:08:13.424Z" }, ] [package.optional-dependencies] @@ -4962,7 +5038,7 @@ wheels = [ [[package]] name = "sqlspec" -version = "0.21.1" +version = "0.22.0" source = { editable = "." } dependencies = [ { name = "eval-type-backport", marker = "python_full_version < '3.10'" }, @@ -5100,6 +5176,7 @@ dev = [ { name = "bump-my-version" }, { name = "coverage" }, { name = "duckdb" }, + { name = "fsspec", extra = ["s3"] }, { name = "hatch-mypyc" }, { name = "mypy" }, { name = "myst-parser", version = "3.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, @@ -5174,6 +5251,7 @@ extras = [ { name = "adbc-driver-manager" }, { name = "adbc-driver-postgresql" }, { name = "adbc-driver-sqlite" }, + { name = "fsspec", extra = ["s3"] }, { name = "pgvector" }, { name = "polars" }, { name = "pyarrow" }, @@ -5279,6 +5357,7 @@ dev = [ { name = "bump-my-version" }, { name = "coverage", specifier = ">=7.6.1" }, { name = "duckdb" }, + { name = "fsspec", extras = ["s3"] }, { name = "hatch-mypyc" }, { name = "mypy", specifier = ">=1.13.0" }, { name = "myst-parser" }, @@ -5343,6 +5422,7 @@ extras = [ { name = "adbc-driver-manager" }, { name = "adbc-driver-postgresql" }, { name = "adbc-driver-sqlite" }, + { name = "fsspec", extras = ["s3"] }, { name = "pgvector" }, { name = "polars" }, { name = "pyarrow" }, @@ -5570,10 +5650,29 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" }, ] +[[package]] +name = "urllib3" +version = "1.26.20" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/e4/e8/6ff5e6bc22095cfc59b6ea711b687e2b7ed4bdb373f7eeec370a97d7392f/urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32", size = 307380, upload-time = "2024-08-29T15:43:11.37Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/cf/8435d5a7159e2a9c83a95896ed596f68cf798005fe107cc655b5c5c14704/urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e", size = 144225, upload-time = "2024-08-29T15:43:08.921Z" }, +] + [[package]] name = "urllib3" version = "2.5.0" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.14'", + "python_full_version == '3.13.*'", + "python_full_version == '3.12.*'", + "python_full_version == '3.11.*'", + "python_full_version == '3.10.*'", +] sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" },