diff --git a/pyproject.toml b/pyproject.toml index d14e4ef19..449f1b71e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -360,8 +360,8 @@ exclude = ["**/node_modules", "**/__pycache__", ".venv", "tools", "docs", "tmp", include = ["sqlspec", "tests"] pythonVersion = "3.9" reportMissingTypeStubs = false -reportPrivateImportUsage = false -reportPrivateUsage = false +reportPrivateImportUsage = true +reportPrivateUsage = true reportTypedDictNotRequiredAccess = false reportUnknownArgumentType = false reportUnnecessaryCast = false diff --git a/sqlspec/_sql.py b/sqlspec/_sql.py index 58f8841d7..7a0e930f2 100644 --- a/sqlspec/_sql.py +++ b/sqlspec/_sql.py @@ -170,7 +170,7 @@ def __call__(self, statement: str, dialect: DialectType = None) -> "Any": actual_type_str == "WITH" and parsed_expr.this and isinstance(parsed_expr.this, exp.Select) ): builder = Select(dialect=dialect or self.dialect) - builder._expression = parsed_expr + builder.set_expression(parsed_expr) return builder if actual_type_str in {"INSERT", "UPDATE", "DELETE"} and parsed_expr.args.get("returning") is not None: @@ -451,7 +451,7 @@ def _populate_insert_from_sql(self, builder: "Insert", sql_string: str) -> "Inse parsed_expr: exp.Expression = exp.maybe_parse(sql_string, dialect=self.dialect) if isinstance(parsed_expr, exp.Insert): - builder._expression = parsed_expr + builder.set_expression(parsed_expr) return builder if isinstance(parsed_expr, exp.Select): @@ -470,7 +470,7 @@ def _populate_select_from_sql(self, builder: "Select", sql_string: str) -> "Sele parsed_expr: exp.Expression = exp.maybe_parse(sql_string, dialect=self.dialect) if isinstance(parsed_expr, exp.Select): - builder._expression = parsed_expr + builder.set_expression(parsed_expr) return builder logger.warning("Cannot create SELECT from %s statement", type(parsed_expr).__name__) @@ -485,7 +485,7 @@ def _populate_update_from_sql(self, builder: "Update", sql_string: str) -> "Upda parsed_expr: exp.Expression = exp.maybe_parse(sql_string, dialect=self.dialect) if isinstance(parsed_expr, exp.Update): - builder._expression = parsed_expr + builder.set_expression(parsed_expr) return builder logger.warning("Cannot create UPDATE from %s statement", type(parsed_expr).__name__) @@ -500,7 +500,7 @@ def _populate_delete_from_sql(self, builder: "Delete", sql_string: str) -> "Dele parsed_expr: exp.Expression = exp.maybe_parse(sql_string, dialect=self.dialect) if isinstance(parsed_expr, exp.Delete): - builder._expression = parsed_expr + builder.set_expression(parsed_expr) return builder logger.warning("Cannot create DELETE from %s statement", type(parsed_expr).__name__) @@ -515,7 +515,7 @@ def _populate_merge_from_sql(self, builder: "Merge", sql_string: str) -> "Merge" parsed_expr: exp.Expression = exp.maybe_parse(sql_string, dialect=self.dialect) if isinstance(parsed_expr, exp.Merge): - builder._expression = parsed_expr + builder.set_expression(parsed_expr) return builder logger.warning("Cannot create MERGE from %s statement", type(parsed_expr).__name__) @@ -724,19 +724,15 @@ def raw(sql_fragment: str, **parameters: Any) -> "Union[exp.Expression, SQL]": if not parameters: try: parsed: exp.Expression = exp.maybe_parse(sql_fragment) - return parsed - if sql_fragment.strip().replace("_", "").replace(".", "").isalnum(): - return exp.to_identifier(sql_fragment) - return exp.Literal.string(sql_fragment) except Exception as e: msg = f"Failed to parse raw SQL fragment '{sql_fragment}': {e}" raise SQLBuilderError(msg) from e + return parsed return SQL(sql_fragment, parameters) - 
@staticmethod def count( - column: Union[str, exp.Expression, "ExpressionWrapper", "Case", "Column"] = "*", distinct: bool = False + self, column: Union[str, exp.Expression, "ExpressionWrapper", "Case", "Column"] = "*", distinct: bool = False ) -> AggregateExpression: """Create a COUNT expression. @@ -750,7 +746,7 @@ def count( if isinstance(column, str) and column == "*": expr = exp.Count(this=exp.Star(), distinct=distinct) else: - col_expr = SQLFactory._extract_expression(column) + col_expr = self._extract_expression(column) expr = exp.Count(this=col_expr, distinct=distinct) return AggregateExpression(expr) @@ -1068,11 +1064,11 @@ def _extract_expression(value: Any) -> exp.Expression: if isinstance(value, str): return exp.column(value) if isinstance(value, Column): - return value._expression + return value.sqlglot_expression if isinstance(value, ExpressionWrapper): return value.expression if isinstance(value, Case): - return exp.Case(ifs=value._conditions, default=value._default) + return exp.Case(ifs=value.conditions, default=value.default) if isinstance(value, exp.Expression): return value return exp.convert(value) diff --git a/sqlspec/adapters/adbc/driver.py b/sqlspec/adapters/adbc/driver.py index 7eaac9f1f..69d649502 100644 --- a/sqlspec/adapters/adbc/driver.py +++ b/sqlspec/adapters/adbc/driver.py @@ -521,7 +521,7 @@ def _execute_many(self, cursor: "Cursor", statement: SQL) -> "ExecutionResult": try: if not prepared_parameters: - cursor._rowcount = 0 + cursor._rowcount = 0 # pyright: ignore[reportPrivateUsage] row_count = 0 elif isinstance(prepared_parameters, list) and prepared_parameters: processed_params = [] @@ -596,7 +596,7 @@ def _execute_script(self, cursor: "Cursor", statement: "SQL") -> "ExecutionResul Execution result with statement counts """ if statement.is_script: - sql = statement._raw_sql + sql = statement.raw_sql prepared_parameters: list[Any] = [] else: sql, prepared_parameters = self._get_compiled_sql(statement, self.statement_config) diff --git a/sqlspec/adapters/oracledb/driver.py b/sqlspec/adapters/oracledb/driver.py index 19ef4d540..11e9ccecb 100644 --- a/sqlspec/adapters/oracledb/driver.py +++ b/sqlspec/adapters/oracledb/driver.py @@ -286,6 +286,11 @@ def _execute_many(self, cursor: Any, statement: "SQL") -> "ExecutionResult": msg = "execute_many requires parameters" raise ValueError(msg) + # Oracle-specific fix: Ensure parameters are in list format for executemany + # Oracle expects a list of sequences, not a tuple of sequences + if isinstance(prepared_parameters, tuple): + prepared_parameters = list(prepared_parameters) + cursor.executemany(sql, prepared_parameters) # Calculate affected rows based on parameter count diff --git a/sqlspec/adapters/psycopg/config.py b/sqlspec/adapters/psycopg/config.py index cf5237c27..80f4509d1 100644 --- a/sqlspec/adapters/psycopg/config.py +++ b/sqlspec/adapters/psycopg/config.py @@ -173,8 +173,7 @@ def _close_pool(self) -> None: logger.info("Closing Psycopg connection pool", extra={"adapter": "psycopg"}) try: - if hasattr(self.pool_instance, "_closed"): - self.pool_instance._closed = True + self.pool_instance._closed = True # pyright: ignore[reportPrivateUsage] self.pool_instance.close() logger.info("Psycopg connection pool closed successfully", extra={"adapter": "psycopg"}) @@ -350,8 +349,7 @@ async def _close_pool(self) -> None: return try: - if hasattr(self.pool_instance, "_closed"): - self.pool_instance._closed = True + self.pool_instance._closed = True # pyright: ignore[reportPrivateUsage] await 
self.pool_instance.close() finally: diff --git a/sqlspec/base.py b/sqlspec/base.py index d110ea453..2865f6c68 100644 --- a/sqlspec/base.py +++ b/sqlspec/base.py @@ -15,9 +15,8 @@ ) from sqlspec.core.cache import ( CacheConfig, - CacheStatsAggregate, get_cache_config, - get_cache_stats, + get_cache_statistics, log_cache_stats, reset_cache_stats, update_cache_config, @@ -532,13 +531,13 @@ def update_cache_config(config: CacheConfig) -> None: update_cache_config(config) @staticmethod - def get_cache_stats() -> CacheStatsAggregate: + def get_cache_stats() -> "dict[str, Any]": """Get current cache statistics. Returns: Cache statistics object with detailed metrics. """ - return get_cache_stats() + return get_cache_statistics() @staticmethod def reset_cache_stats() -> None: diff --git a/sqlspec/builder/_base.py b/sqlspec/builder/_base.py index 5df4719a5..110290585 100644 --- a/sqlspec/builder/_base.py +++ b/sqlspec/builder/_base.py @@ -13,7 +13,7 @@ from sqlglot.optimizer import optimize from typing_extensions import Self -from sqlspec.core.cache import CacheKey, get_cache_config, get_default_cache +from sqlspec.core.cache import get_cache, get_cache_config from sqlspec.core.hashing import hash_optimized_expression from sqlspec.core.parameters import ParameterStyle, ParameterStyleConfig from sqlspec.core.statement import SQL, StatementConfig @@ -91,6 +91,36 @@ def _initialize_expression(self) -> None: "QueryBuilder._create_base_expression must return a valid sqlglot expression." ) + def get_expression(self) -> Optional[exp.Expression]: + """Get expression reference (no copy). + + Returns: + The current SQLGlot expression or None if not set + """ + return self._expression + + def set_expression(self, expression: exp.Expression) -> None: + """Set expression with validation. + + Args: + expression: SQLGlot expression to set + + Raises: + TypeError: If expression is not a SQLGlot Expression + """ + if not isinstance(expression, exp.Expression): + msg = f"Expected Expression, got {type(expression)}" + raise TypeError(msg) + self._expression = expression + + def has_expression(self) -> bool: + """Check if expression exists. + + Returns: + True if expression is set, False otherwise + """ + return self._expression is not None + @abstractmethod def _create_base_expression(self) -> exp.Expression: """Create the base sqlglot expression for the specific query type. @@ -307,12 +337,13 @@ def with_cte(self: Self, alias: str, query: "Union[QueryBuilder, exp.Select, str cte_select_expression: exp.Select if isinstance(query, QueryBuilder): - if query._expression is None: + query_expr = query.get_expression() + if query_expr is None: self._raise_sql_builder_error("CTE query builder has no expression.") - if not isinstance(query._expression, exp.Select): - msg = f"CTE query builder expression must be a Select, got {type(query._expression).__name__}." + if not isinstance(query_expr, exp.Select): + msg = f"CTE query builder expression must be a Select, got {type(query_expr).__name__}." 
self._raise_sql_builder_error(msg) - cte_select_expression = query._expression + cte_select_expression = query_expr param_mapping = self._merge_cte_parameters(alias, query.parameters) updated_expression = self._update_placeholders_in_expression(cte_select_expression, param_mapping) if not isinstance(updated_expression, exp.Select): @@ -398,9 +429,8 @@ def _optimize_expression(self, expression: exp.Expression) -> exp.Expression: expression, dialect=dialect_name, schema=self.schema, optimizer_settings=optimizer_settings ) - cache_key_obj = CacheKey((cache_key,)) - unified_cache = get_default_cache() - cached_optimized = unified_cache.get(cache_key_obj) + cache = get_cache() + cached_optimized = cache.get("optimized", cache_key) if cached_optimized: return cast("exp.Expression", cached_optimized) @@ -409,7 +439,7 @@ def _optimize_expression(self, expression: exp.Expression) -> exp.Expression: expression, schema=self.schema, dialect=self.dialect_name, optimizer_settings=optimizer_settings ) - unified_cache.put(cache_key_obj, optimized) + cache.put("optimized", cache_key, optimized) except Exception: return expression @@ -430,15 +460,14 @@ def to_statement(self, config: "Optional[StatementConfig]" = None) -> "SQL": return self._to_statement(config) cache_key_str = self._generate_builder_cache_key(config) - cache_key = CacheKey((cache_key_str,)) - unified_cache = get_default_cache() - cached_sql = unified_cache.get(cache_key) + cache = get_cache() + cached_sql = cache.get("builder", cache_key_str) if cached_sql is not None: return cast("SQL", cached_sql) sql_statement = self._to_statement(config) - unified_cache.put(cache_key, sql_statement) + cache.put("builder", cache_key_str, sql_statement) return sql_statement @@ -531,3 +560,16 @@ def _merge_sql_object_parameters(self, sql_obj: Any) -> None: def parameters(self) -> dict[str, Any]: """Public access to query parameters.""" return self._parameters + + def set_parameters(self, parameters: dict[str, Any]) -> None: + """Set query parameters (public API).""" + self._parameters = parameters.copy() + + @property + def with_ctes(self) -> "dict[str, exp.CTE]": + """Get WITH clause CTEs (public API).""" + return dict(self._with_ctes) + + def generate_unique_parameter_name(self, base_name: str) -> str: + """Generate unique parameter name (public API).""" + return self._generate_unique_parameter_name(base_name) diff --git a/sqlspec/builder/_column.py b/sqlspec/builder/_column.py index fc3bceb57..891eefa32 100644 --- a/sqlspec/builder/_column.py +++ b/sqlspec/builder/_column.py @@ -254,6 +254,15 @@ def __hash__(self) -> int: """Hash based on table and column name.""" return hash((self.table, self.name)) + @property + def sqlglot_expression(self) -> exp.Expression: + """Get the underlying SQLGlot expression (public API). 
+ + Returns: + The SQLGlot expression for this column + """ + return self._expression + class FunctionColumn: """Represents the result of a SQL function call on a column.""" diff --git a/sqlspec/builder/_ddl.py b/sqlspec/builder/_ddl.py index 2b9c3a725..8ec48e7ef 100644 --- a/sqlspec/builder/_ddl.py +++ b/sqlspec/builder/_ddl.py @@ -973,10 +973,10 @@ def _create_base_expression(self) -> exp.Expression: select_expr = self._select_query.expression select_parameters = self._select_query.parameters elif isinstance(self._select_query, Select): - select_expr = self._select_query._expression - select_parameters = self._select_query._parameters + select_expr = self._select_query.get_expression() + select_parameters = self._select_query.parameters - with_ctes = self._select_query._with_ctes + with_ctes = self._select_query.with_ctes if with_ctes and select_expr and isinstance(select_expr, exp.Select): for alias, cte in with_ctes.items(): if has_with_method(select_expr): @@ -1100,8 +1100,8 @@ def _create_base_expression(self) -> exp.Expression: select_expr = self._select_query.expression select_parameters = self._select_query.parameters elif isinstance(self._select_query, Select): - select_expr = self._select_query._expression - select_parameters = self._select_query._parameters + select_expr = self._select_query.get_expression() + select_parameters = self._select_query.parameters elif isinstance(self._select_query, str): select_expr = exp.maybe_parse(self._select_query) select_parameters = None @@ -1198,8 +1198,8 @@ def _create_base_expression(self) -> exp.Expression: select_expr = self._select_query.expression select_parameters = self._select_query.parameters elif isinstance(self._select_query, Select): - select_expr = self._select_query._expression - select_parameters = self._select_query._parameters + select_expr = self._select_query.get_expression() + select_parameters = self._select_query.parameters elif isinstance(self._select_query, str): select_expr = exp.maybe_parse(self._select_query) select_parameters = None diff --git a/sqlspec/builder/_insert.py b/sqlspec/builder/_insert.py index 74473336e..7d375047b 100644 --- a/sqlspec/builder/_insert.py +++ b/sqlspec/builder/_insert.py @@ -90,6 +90,10 @@ def _get_insert_expression(self) -> exp.Insert: raise SQLBuilderError(ERR_MSG_INTERNAL_EXPRESSION_TYPE) return self._expression + def get_insert_expression(self) -> exp.Insert: + """Get the insert expression (public API).""" + return self._get_insert_expression() + def values(self, *values: Any, **kwargs: Any) -> "Self": """Adds a row of values to the INSERT statement. @@ -129,7 +133,7 @@ def values(self, *values: Any, **kwargs: Any) -> "Self": if hasattr(values_0, "items") and hasattr(values_0, "keys"): return self.values_from_dict(values_0) - insert_expr = self._get_insert_expression() + insert_expr = self.get_insert_expression() if self._columns and len(values) != len(self._columns): msg = ERR_MSG_VALUES_COLUMNS_MISMATCH.format(values_len=len(values), columns_len=len(self._columns)) @@ -160,9 +164,9 @@ def values(self, *values: Any, **kwargs: Any) -> "Self": if self._columns and i < len(self._columns): column_str = str(self._columns[i]) column_name = column_str.rsplit(".", maxsplit=1)[-1] if "." 
in column_str else column_str - param_name = self._generate_unique_parameter_name(column_name) + param_name = self.generate_unique_parameter_name(column_name) else: - param_name = self._generate_unique_parameter_name(f"value_{i + 1}") + param_name = self.generate_unique_parameter_name(f"value_{i + 1}") _, param_name = self.add_parameter(value, name=param_name) value_placeholders.append(exp.Placeholder(this=param_name)) @@ -336,7 +340,7 @@ def do_nothing(self) -> "Insert": ).do_nothing() ``` """ - insert_expr = self._insert_builder._get_insert_expression() + insert_expr = self._insert_builder.get_insert_expression() # Create ON CONFLICT with proper structure conflict_keys = [exp.to_identifier(col) for col in self._columns] if self._columns else None @@ -363,7 +367,7 @@ def do_update(self, **kwargs: Any) -> "Insert": ) ``` """ - insert_expr = self._insert_builder._get_insert_expression() + insert_expr = self._insert_builder.get_insert_expression() # Create SET expressions for the UPDATE set_expressions = [] @@ -394,7 +398,7 @@ def do_update(self, **kwargs: Any) -> "Insert": value_expr = val else: # Create parameter for regular values - param_name = self._insert_builder._generate_unique_parameter_name(col) + param_name = self._insert_builder.generate_unique_parameter_name(col) _, param_name = self._insert_builder.add_parameter(val, name=param_name) value_expr = exp.Placeholder(this=param_name) diff --git a/sqlspec/builder/_parsing_utils.py b/sqlspec/builder/_parsing_utils.py index 5558aa9cf..60af2ab43 100644 --- a/sqlspec/builder/_parsing_utils.py +++ b/sqlspec/builder/_parsing_utils.py @@ -18,6 +18,27 @@ ) +def extract_column_name(column: Union[str, exp.Column]) -> str: + """Extract column name from column expression for parameter naming. + + Args: + column: Column expression (string or SQLGlot Column) + + Returns: + Column name as string for use as parameter name + """ + if isinstance(column, str): + if "." 
in column: + return column.split(".")[-1] + return column + if isinstance(column, exp.Column): + try: + return str(column.this.this) + except AttributeError: + return str(column.this) if column.this else "column" + return "column" + + def parse_column_expression( column_input: Union[str, exp.Expression, Any], builder: Optional[Any] = None ) -> exp.Expression: @@ -139,10 +160,8 @@ def parse_condition_expression( if value is None: return exp.Is(this=column_expr, expression=exp.null()) if builder and has_parameter_builder(builder): - from sqlspec.builder.mixins._where_clause import _extract_column_name - - column_name = _extract_column_name(column) - param_name = builder._generate_unique_parameter_name(column_name) + column_name = extract_column_name(column) + param_name = builder.generate_unique_parameter_name(column_name) _, param_name = builder.add_parameter(value, name=param_name) return exp.EQ(this=column_expr, expression=exp.Placeholder(this=param_name)) if isinstance(value, str): diff --git a/sqlspec/builder/_update.py b/sqlspec/builder/_update.py index ca0b1af0b..79e03c5d2 100644 --- a/sqlspec/builder/_update.py +++ b/sqlspec/builder/_update.py @@ -131,7 +131,7 @@ def join( subquery_exp = exp.paren(exp.maybe_parse(subquery.sql, dialect=self.dialect)) table_expr = exp.alias_(subquery_exp, alias) if alias else subquery_exp - subquery_parameters = table._parameters + subquery_parameters = table.parameters if subquery_parameters: for p_name, p_value in subquery_parameters.items(): self.add_parameter(p_value, name=p_name) diff --git a/sqlspec/builder/mixins/_cte_and_set_ops.py b/sqlspec/builder/mixins/_cte_and_set_ops.py index 9db07bc13..0763fc703 100644 --- a/sqlspec/builder/mixins/_cte_and_set_ops.py +++ b/sqlspec/builder/mixins/_cte_and_set_ops.py @@ -1,10 +1,11 @@ +# pyright: reportPrivateUsage=false """CTE and set operation mixins. Provides mixins for Common Table Expressions (WITH clause) and set operations (UNION, INTERSECT, EXCEPT). """ -from typing import Any, Optional, Union +from typing import TYPE_CHECKING, Any, Optional, Union, cast from mypy_extensions import trait from sqlglot import exp @@ -12,6 +13,9 @@ from sqlspec.exceptions import SQLBuilderError +if TYPE_CHECKING: + from sqlspec.builder._base import QueryBuilder + __all__ = ("CommonTableExpressionMixin", "SetOperationMixin") @@ -20,8 +24,10 @@ class CommonTableExpressionMixin: """Mixin providing WITH clause (Common Table Expressions) support for SQL builders.""" __slots__ = () - # Type annotation for PyRight - this will be provided by the base class - _expression: Optional[exp.Expression] + + # Type annotations for PyRight - these will be provided by the base class + def get_expression(self) -> Optional[exp.Expression]: ... + def set_expression(self, expression: exp.Expression) -> None: ... _with_ctes: Any # Provided by QueryBuilder dialect: Any # Provided by QueryBuilder @@ -60,12 +66,14 @@ def with_( Returns: The current builder instance for method chaining. """ - if self._expression is None: + builder = cast("QueryBuilder", self) + expression = builder.get_expression() + if expression is None: msg = "Cannot add WITH clause: expression not initialized." raise SQLBuilderError(msg) - if not isinstance(self._expression, (exp.Select, exp.Insert, exp.Update, exp.Delete)): - msg = f"Cannot add WITH clause to {type(self._expression).__name__} expression." + if not isinstance(expression, (exp.Select, exp.Insert, exp.Update, exp.Delete)): + msg = f"Cannot add WITH clause to {type(expression).__name__} expression." 
raise SQLBuilderError(msg) cte_expr: Optional[exp.Expression] = None @@ -103,19 +111,18 @@ def with_( else: cte_alias_expr = exp.alias_(cte_expr, name) - existing_with = self._expression.args.get("with") + existing_with = expression.args.get("with") if existing_with: existing_with.expressions.append(cte_alias_expr) if recursive: existing_with.set("recursive", recursive) else: # Only SELECT, INSERT, UPDATE support WITH clauses - if hasattr(self._expression, "with_") and isinstance( - self._expression, (exp.Select, exp.Insert, exp.Update) - ): - self._expression = self._expression.with_(cte_alias_expr, as_=name, copy=False) + if hasattr(expression, "with_") and isinstance(expression, (exp.Select, exp.Insert, exp.Update)): + updated_expression = expression.with_(cte_alias_expr, as_=name, copy=False) + builder.set_expression(updated_expression) if recursive: - with_clause = self._expression.find(exp.With) + with_clause = updated_expression.find(exp.With) if with_clause: with_clause.set("recursive", recursive) self._with_ctes[name] = exp.CTE(this=cte_expr, alias=exp.to_table(name)) @@ -128,10 +135,12 @@ class SetOperationMixin: """Mixin providing set operations (UNION, INTERSECT, EXCEPT) for SELECT builders.""" __slots__ = () - # Type annotation for PyRight - this will be provided by the base class - _expression: Optional[exp.Expression] - _parameters: dict[str, Any] + # Type annotations for PyRight - these will be provided by the base class + def get_expression(self) -> Optional[exp.Expression]: ... + def set_expression(self, expression: exp.Expression) -> None: ... + def set_parameters(self, parameters: "dict[str, Any]") -> None: ... + dialect: Any = None def build(self) -> Any: @@ -162,7 +171,7 @@ def union(self, other: Any, all_: bool = False) -> Self: union_expr = exp.union(left_expr, right_expr, distinct=not all_) new_builder = type(self)() new_builder.dialect = self.dialect - new_builder._expression = union_expr + cast("QueryBuilder", new_builder).set_expression(union_expr) merged_parameters = dict(left_query.parameters) for param_name, param_value in right_query.parameters.items(): if param_name in merged_parameters: @@ -181,11 +190,11 @@ def rename_parameter( right_expr = right_expr.transform(rename_parameter) union_expr = exp.union(left_expr, right_expr, distinct=not all_) - new_builder._expression = union_expr + cast("QueryBuilder", new_builder).set_expression(union_expr) merged_parameters[new_param_name] = param_value else: merged_parameters[param_name] = param_value - new_builder._parameters = merged_parameters + new_builder.set_parameters(merged_parameters) return new_builder def intersect(self, other: Any) -> Self: @@ -210,10 +219,10 @@ def intersect(self, other: Any) -> Self: intersect_expr = exp.intersect(left_expr, right_expr, distinct=True) new_builder = type(self)() new_builder.dialect = self.dialect - new_builder._expression = intersect_expr + cast("QueryBuilder", new_builder).set_expression(intersect_expr) merged_parameters = dict(left_query.parameters) merged_parameters.update(right_query.parameters) - new_builder._parameters = merged_parameters + new_builder.set_parameters(merged_parameters) return new_builder def except_(self, other: Any) -> Self: @@ -238,8 +247,8 @@ def except_(self, other: Any) -> Self: except_expr = exp.except_(left_expr, right_expr) new_builder = type(self)() new_builder.dialect = self.dialect - new_builder._expression = except_expr + cast("QueryBuilder", new_builder).set_expression(except_expr) merged_parameters = dict(left_query.parameters) 
merged_parameters.update(right_query.parameters) - new_builder._parameters = merged_parameters + new_builder.set_parameters(merged_parameters) return new_builder diff --git a/sqlspec/builder/mixins/_delete_operations.py b/sqlspec/builder/mixins/_delete_operations.py index ae04b1534..ae5c13b7c 100644 --- a/sqlspec/builder/mixins/_delete_operations.py +++ b/sqlspec/builder/mixins/_delete_operations.py @@ -1,3 +1,4 @@ +# pyright: reportPrivateUsage=false """DELETE operation mixins. Provides mixins for DELETE statement functionality including @@ -21,8 +22,9 @@ class DeleteFromClauseMixin: __slots__ = () - # Type annotation for PyRight - this will be provided by the base class - _expression: Optional[exp.Expression] + # Type annotations for PyRight - these will be provided by the base class + def get_expression(self) -> Optional[exp.Expression]: ... + def set_expression(self, expression: exp.Expression) -> None: ... def from_(self, table: str) -> Self: """Set the target table for the DELETE statement. @@ -33,13 +35,16 @@ def from_(self, table: str) -> Self: Returns: The current builder instance for method chaining. """ - if self._expression is None: - self._expression = exp.Delete() - if not isinstance(self._expression, exp.Delete): - current_expr_type = type(self._expression).__name__ + current_expr = self.get_expression() + if current_expr is None: + self.set_expression(exp.Delete()) + current_expr = self.get_expression() + + if not isinstance(current_expr, exp.Delete): + current_expr_type = type(current_expr).__name__ msg = f"Base expression for Delete is {current_expr_type}, expected Delete." raise SQLBuilderError(msg) setattr(self, "_table", table) - self._expression.set("this", exp.to_table(table)) + current_expr.set("this", exp.to_table(table)) return self diff --git a/sqlspec/builder/mixins/_insert_operations.py b/sqlspec/builder/mixins/_insert_operations.py index 04b28aea4..896c94373 100644 --- a/sqlspec/builder/mixins/_insert_operations.py +++ b/sqlspec/builder/mixins/_insert_operations.py @@ -1,3 +1,4 @@ +# pyright: reportPrivateUsage=false """INSERT operation mixins. Provides mixins for INSERT statement functionality including @@ -25,8 +26,9 @@ class InsertIntoClauseMixin: __slots__ = () - # Type annotation for PyRight - this will be provided by the base class - _expression: Optional[exp.Expression] + # Type annotations for PyRight - these will be provided by the base class + def get_expression(self) -> Optional[exp.Expression]: ... + def set_expression(self, expression: exp.Expression) -> None: ... def into(self, table: str) -> Self: """Set the target table for the INSERT statement. @@ -40,14 +42,17 @@ def into(self, table: str) -> Self: Returns: The current builder instance for method chaining. """ - if self._expression is None: - self._expression = exp.Insert() - if not isinstance(self._expression, exp.Insert): + current_expr = self.get_expression() + if current_expr is None: + self.set_expression(exp.Insert()) + current_expr = self.get_expression() + + if not isinstance(current_expr, exp.Insert): msg = "Cannot set target table on a non-INSERT expression." 
raise SQLBuilderError(msg) setattr(self, "_table", table) - self._expression.set("this", exp.to_table(table)) + current_expr.set("this", exp.to_table(table)) return self @@ -57,8 +62,9 @@ class InsertValuesMixin: __slots__ = () - # Type annotation for PyRight - this will be provided by the base class - _expression: Optional[exp.Expression] + # Type annotations for PyRight - these will be provided by the base class + def get_expression(self) -> Optional[exp.Expression]: ... + def set_expression(self, expression: exp.Expression) -> None: ... _columns: Any # Provided by QueryBuilder @@ -74,14 +80,17 @@ def _generate_unique_parameter_name(self, base_name: str) -> str: def columns(self, *columns: Union[str, exp.Expression]) -> Self: """Set the columns for the INSERT statement and synchronize the _columns attribute on the builder.""" - if self._expression is None: - self._expression = exp.Insert() - if not isinstance(self._expression, exp.Insert): + current_expr = self.get_expression() + if current_expr is None: + self.set_expression(exp.Insert()) + current_expr = self.get_expression() + + if not isinstance(current_expr, exp.Insert): msg = "Cannot set columns on a non-INSERT expression." raise SQLBuilderError(msg) # Get the current table from the expression - current_this = self._expression.args.get("this") + current_this = current_expr.args.get("this") if current_this is None: msg = "Table must be set using .into() before setting columns." raise SQLBuilderError(msg) @@ -95,11 +104,11 @@ def columns(self, *columns: Union[str, exp.Expression]) -> Self: # Create Schema object with table and columns schema = exp.Schema(this=table_name, expressions=column_identifiers) - self._expression.set("this", schema) + current_expr.set("this", schema) # No columns specified - ensure we have just a Table object elif isinstance(current_this, exp.Schema): table_name = current_this.this - self._expression.set("this", exp.Table(this=table_name)) + current_expr.set("this", exp.Table(this=table_name)) try: cols = self._columns @@ -126,9 +135,12 @@ def values(self, *values: Any, **kwargs: Any) -> Self: Returns: The current builder instance for method chaining. """ - if self._expression is None: - self._expression = exp.Insert() - if not isinstance(self._expression, exp.Insert): + current_expr = self.get_expression() + if current_expr is None: + self.set_expression(exp.Insert()) + current_expr = self.get_expression() + + if not isinstance(current_expr, exp.Insert): msg = "Cannot add values to a non-INSERT expression." raise SQLBuilderError(msg) @@ -137,8 +149,8 @@ def values(self, *values: Any, **kwargs: Any) -> Self: msg = "Cannot mix positional values with keyword values." raise SQLBuilderError(msg) try: - _columns = self._columns - if not _columns: + cols = self._columns + if not cols: self.columns(*kwargs.keys()) except AttributeError: pass @@ -156,8 +168,8 @@ def values(self, *values: Any, **kwargs: Any) -> Self: elif len(values) == 1 and hasattr(values[0], "items"): mapping = values[0] try: - _columns = self._columns - if not _columns: + cols = self._columns + if not cols: self.columns(*mapping.keys()) except AttributeError: pass @@ -174,9 +186,9 @@ def values(self, *values: Any, **kwargs: Any) -> Self: row_exprs.append(exp.Placeholder(this=param_name)) else: try: - _columns = self._columns - if _columns and len(values) != len(_columns): - msg = f"Number of values ({len(values)}) does not match the number of specified columns ({len(_columns)})." 
+ cols = self._columns + if cols and len(values) != len(cols): + msg = f"Number of values ({len(values)}) does not match the number of specified columns ({len(cols)})." raise SQLBuilderError(msg) except AttributeError: pass @@ -186,11 +198,9 @@ def values(self, *values: Any, **kwargs: Any) -> Self: row_exprs.append(v) else: try: - _columns = self._columns - if _columns and i < len(_columns): - column_name = ( - str(_columns[i]).split(".")[-1] if "." in str(_columns[i]) else str(_columns[i]) - ) + cols = self._columns + if cols and i < len(cols): + column_name = str(cols[i]).split(".")[-1] if "." in str(cols[i]) else str(cols[i]) param_name = self._generate_unique_parameter_name(column_name) else: param_name = self._generate_unique_parameter_name(f"value_{i + 1}") @@ -200,7 +210,7 @@ def values(self, *values: Any, **kwargs: Any) -> Self: row_exprs.append(exp.Placeholder(this=param_name)) values_expr = exp.Values(expressions=[row_exprs]) - self._expression.set("expression", values_expr) + current_expr.set("expression", values_expr) return self def add_values(self, values: Sequence[Any]) -> Self: @@ -221,8 +231,9 @@ class InsertFromSelectMixin: __slots__ = () - # Type annotation for PyRight - this will be provided by the base class - _expression: Optional[exp.Expression] + # Type annotations for PyRight - these will be provided by the base class + def get_expression(self) -> Optional[exp.Expression]: ... + def set_expression(self, expression: exp.Expression) -> None: ... _table: Any # Provided by QueryBuilder @@ -250,9 +261,12 @@ def from_select(self, select_builder: Any) -> Self: except AttributeError: msg = "The target table must be set using .into() before adding values." raise SQLBuilderError(msg) - if self._expression is None: - self._expression = exp.Insert() - if not isinstance(self._expression, exp.Insert): + current_expr = self.get_expression() + if current_expr is None: + self.set_expression(exp.Insert()) + current_expr = self.get_expression() + + if not isinstance(current_expr, exp.Insert): msg = "Cannot set INSERT source on a non-INSERT expression." raise SQLBuilderError(msg) subquery_parameters = select_builder._parameters @@ -261,7 +275,7 @@ def from_select(self, select_builder: Any) -> Self: self.add_parameter(p_value, name=p_name) select_expr = select_builder._expression if select_expr and isinstance(select_expr, exp.Select): - self._expression.set("expression", select_expr.copy()) + current_expr.set("expression", select_expr.copy()) else: msg = "SelectBuilder must have a valid SELECT expression." raise SQLBuilderError(msg) diff --git a/sqlspec/builder/mixins/_join_operations.py b/sqlspec/builder/mixins/_join_operations.py index 2deba044a..29390781f 100644 --- a/sqlspec/builder/mixins/_join_operations.py +++ b/sqlspec/builder/mixins/_join_operations.py @@ -1,3 +1,4 @@ +# pyright: reportPrivateUsage=false """JOIN operation mixins. Provides mixins for JOIN operations in SELECT statements. diff --git a/sqlspec/builder/mixins/_merge_operations.py b/sqlspec/builder/mixins/_merge_operations.py index 9d7f165f5..0c65b1c14 100644 --- a/sqlspec/builder/mixins/_merge_operations.py +++ b/sqlspec/builder/mixins/_merge_operations.py @@ -1,3 +1,4 @@ +# pyright: reportPrivateUsage=false """MERGE operation mixins. 
Provides mixins for MERGE statement functionality including INTO, @@ -28,7 +29,10 @@ class MergeIntoClauseMixin: """Mixin providing INTO clause for MERGE builders.""" __slots__ = () - _expression: Optional[exp.Expression] + + # Type annotations for PyRight - these will be provided by the base class + def get_expression(self) -> Optional[exp.Expression]: ... + def set_expression(self, expression: exp.Expression) -> None: ... def into(self, table: Union[str, exp.Expression], alias: Optional[str] = None) -> Self: """Set the target table for the MERGE operation (INTO clause). @@ -41,11 +45,14 @@ def into(self, table: Union[str, exp.Expression], alias: Optional[str] = None) - Returns: The current builder instance for method chaining. """ - if self._expression is None: - self._expression = exp.Merge(this=None, using=None, on=None, whens=exp.Whens(expressions=[])) - if not isinstance(self._expression, exp.Merge): - self._expression = exp.Merge(this=None, using=None, on=None, whens=exp.Whens(expressions=[])) - self._expression.set("this", exp.to_table(table, alias=alias) if isinstance(table, str) else table) + current_expr = self.get_expression() + if current_expr is None or not isinstance(current_expr, exp.Merge): + self.set_expression(exp.Merge(this=None, using=None, on=None, whens=exp.Whens(expressions=[]))) + current_expr = self.get_expression() + + # Type guard: current_expr is now guaranteed to be an Expression + assert current_expr is not None + current_expr.set("this", exp.to_table(table, alias=alias) if isinstance(table, str) else table) return self @@ -54,7 +61,10 @@ class MergeUsingClauseMixin: """Mixin providing USING clause for MERGE builders.""" __slots__ = () - _expression: Optional[exp.Expression] + + # Type annotations for PyRight - these will be provided by the base class + def get_expression(self) -> Optional[exp.Expression]: ... + def set_expression(self, expression: exp.Expression) -> None: ... def add_parameter(self, value: Any, name: Optional[str] = None) -> tuple[Any, str]: """Add parameter - provided by QueryBuilder.""" @@ -75,11 +85,13 @@ def using(self, source: Union[str, exp.Expression, Any], alias: Optional[str] = Raises: SQLBuilderError: If the current expression is not a MERGE statement or if the source type is unsupported. 
""" - if self._expression is None: - self._expression = exp.Merge(this=None, using=None, on=None, whens=exp.Whens(expressions=[])) - if not isinstance(self._expression, exp.Merge): - self._expression = exp.Merge(this=None, using=None, on=None, whens=exp.Whens(expressions=[])) + current_expr = self.get_expression() + if current_expr is None or not isinstance(current_expr, exp.Merge): + self.set_expression(exp.Merge(this=None, using=None, on=None, whens=exp.Whens(expressions=[]))) + current_expr = self.get_expression() + # Type guard: current_expr is now guaranteed to be an Expression + assert current_expr is not None source_expr: exp.Expression if isinstance(source, str): source_expr = exp.to_table(source, alias=alias) @@ -99,7 +111,7 @@ def using(self, source: Union[str, exp.Expression, Any], alias: Optional[str] = msg = f"Unsupported source type for USING clause: {type(source)}" raise SQLBuilderError(msg) - self._expression.set("using", source_expr) + current_expr.set("using", source_expr) return self @@ -108,7 +120,10 @@ class MergeOnClauseMixin: """Mixin providing ON clause for MERGE builders.""" __slots__ = () - _expression: Optional[exp.Expression] + + # Type annotations for PyRight - these will be provided by the base class + def get_expression(self) -> Optional[exp.Expression]: ... + def set_expression(self, expression: exp.Expression) -> None: ... def on(self, condition: Union[str, exp.Expression]) -> Self: """Set the join condition for the MERGE operation (ON clause). @@ -123,11 +138,13 @@ def on(self, condition: Union[str, exp.Expression]) -> Self: Raises: SQLBuilderError: If the current expression is not a MERGE statement or if the condition type is unsupported. """ - if self._expression is None: - self._expression = exp.Merge(this=None, using=None, on=None, whens=exp.Whens(expressions=[])) - if not isinstance(self._expression, exp.Merge): - self._expression = exp.Merge(this=None, using=None, on=None, whens=exp.Whens(expressions=[])) + current_expr = self.get_expression() + if current_expr is None or not isinstance(current_expr, exp.Merge): + self.set_expression(exp.Merge(this=None, using=None, on=None, whens=exp.Whens(expressions=[]))) + current_expr = self.get_expression() + # Type guard: current_expr is now guaranteed to be an Expression + assert current_expr is not None condition_expr: exp.Expression if isinstance(condition, str): parsed_condition: Optional[exp.Expression] = exp.maybe_parse( @@ -143,7 +160,7 @@ def on(self, condition: Union[str, exp.Expression]) -> Self: msg = f"Unsupported condition type for ON clause: {type(condition)}" raise SQLBuilderError(msg) - self._expression.set("on", condition_expr) + current_expr.set("on", condition_expr) return self @@ -152,7 +169,10 @@ class MergeMatchedClauseMixin: """Mixin providing WHEN MATCHED THEN ... clauses for MERGE builders.""" __slots__ = () - _expression: Optional[exp.Expression] + + # Type annotations for PyRight - these will be provided by the base class + def get_expression(self) -> Optional[exp.Expression]: ... + def set_expression(self, expression: exp.Expression) -> None: ... def add_parameter(self, value: Any, name: Optional[str] = None) -> tuple[Any, str]: """Add parameter - provided by QueryBuilder.""" @@ -170,15 +190,17 @@ def _add_when_clause(self, when_clause: exp.When) -> None: Args: when_clause: The WHEN clause to add. 
""" - if self._expression is None: - self._expression = exp.Merge(this=None, using=None, on=None, whens=exp.Whens(expressions=[])) # type: ignore[misc] - if not isinstance(self._expression, exp.Merge): - self._expression = exp.Merge(this=None, using=None, on=None, whens=exp.Whens(expressions=[])) # type: ignore[misc] - - whens = self._expression.args.get("whens") + current_expr = self.get_expression() + if current_expr is None or not isinstance(current_expr, exp.Merge): + self.set_expression(exp.Merge(this=None, using=None, on=None, whens=exp.Whens(expressions=[]))) + current_expr = self.get_expression() + + # Type guard: current_expr is now guaranteed to be an Expression + assert current_expr is not None + whens = current_expr.args.get("whens") if not whens: whens = exp.Whens(expressions=[]) - self._expression.set("whens", whens) + current_expr.set("whens", whens) whens.append("expressions", when_clause) @@ -315,7 +337,9 @@ class MergeNotMatchedClauseMixin: __slots__ = () - _expression: Optional[exp.Expression] + # Type annotations for PyRight - these will be provided by the base class + def get_expression(self) -> Optional[exp.Expression]: ... + def set_expression(self, expression: exp.Expression) -> None: ... def add_parameter(self, value: Any, name: Optional[str] = None) -> tuple[Any, str]: """Add parameter - provided by QueryBuilder.""" @@ -415,7 +439,9 @@ class MergeNotMatchedBySourceClauseMixin: __slots__ = () - _expression: Optional[exp.Expression] + # Type annotations for PyRight - these will be provided by the base class + def get_expression(self) -> Optional[exp.Expression]: ... + def set_expression(self, expression: exp.Expression) -> None: ... def add_parameter(self, value: Any, name: Optional[str] = None) -> tuple[Any, str]: """Add parameter - provided by QueryBuilder.""" diff --git a/sqlspec/builder/mixins/_order_limit_operations.py b/sqlspec/builder/mixins/_order_limit_operations.py index 85eabc607..62cbf8619 100644 --- a/sqlspec/builder/mixins/_order_limit_operations.py +++ b/sqlspec/builder/mixins/_order_limit_operations.py @@ -1,3 +1,4 @@ +# pyright: reportPrivateUsage=false """ORDER BY, LIMIT, OFFSET, and RETURNING clause mixins. Provides mixins for query result ordering, limiting, and result diff --git a/sqlspec/builder/mixins/_pivot_operations.py b/sqlspec/builder/mixins/_pivot_operations.py index 3b39e05f2..f1f331108 100644 --- a/sqlspec/builder/mixins/_pivot_operations.py +++ b/sqlspec/builder/mixins/_pivot_operations.py @@ -1,3 +1,4 @@ +# pyright: reportPrivateUsage=false """PIVOT and UNPIVOT operation mixins. Provides mixins for PIVOT and UNPIVOT operations in SELECT statements. diff --git a/sqlspec/builder/mixins/_select_operations.py b/sqlspec/builder/mixins/_select_operations.py index 6233f6f9b..29fd69ee0 100644 --- a/sqlspec/builder/mixins/_select_operations.py +++ b/sqlspec/builder/mixins/_select_operations.py @@ -1,3 +1,4 @@ +# pyright: reportPrivateUsage=false """SELECT clause mixins. Provides mixins for SELECT statement functionality including column selection, @@ -28,8 +29,9 @@ class SelectClauseMixin: __slots__ = () - # Type annotation for PyRight - this will be provided by the base class - _expression: Optional[exp.Expression] + # Type annotations for PyRight - these will be provided by the base class + def get_expression(self) -> Optional[exp.Expression]: ... + def set_expression(self, expression: exp.Expression) -> None: ... 
def select(self, *columns: Union[str, exp.Expression, "Column", "FunctionColumn", "SQL", "Case"]) -> Self: """Add columns to SELECT clause. @@ -41,13 +43,17 @@ def select(self, *columns: Union[str, exp.Expression, "Column", "FunctionColumn" The current builder instance for method chaining. """ builder = cast("SQLBuilderProtocol", self) - if builder._expression is None: - builder._expression = exp.Select() - if not isinstance(builder._expression, exp.Select): + current_expr = self.get_expression() + if current_expr is None: + self.set_expression(exp.Select()) + current_expr = self.get_expression() + + if not isinstance(current_expr, exp.Select): msg = "Cannot add select columns to a non-SELECT expression." raise SQLBuilderError(msg) for column in columns: - builder._expression = builder._expression.select(parse_column_expression(column, builder), copy=False) + current_expr = current_expr.select(parse_column_expression(column, builder), copy=False) + self.set_expression(current_expr) return cast("Self", builder) def distinct(self, *columns: Union[str, exp.Expression, "Column", "FunctionColumn", "SQL"]) -> Self: @@ -129,13 +135,13 @@ def group_by(self, *columns: Union[str, exp.Expression]) -> Self: Returns: The current builder instance for method chaining. """ - if self._expression is None or not isinstance(self._expression, exp.Select): + current_expr = self.get_expression() + if current_expr is None or not isinstance(current_expr, exp.Select): return self for column in columns: - self._expression = self._expression.group_by( - exp.column(column) if isinstance(column, str) else column, copy=False - ) + current_expr = current_expr.group_by(exp.column(column) if isinstance(column, str) else column, copy=False) + self.set_expression(current_expr) return self def group_by_rollup(self, *columns: Union[str, exp.Expression]) -> Self: @@ -480,9 +486,12 @@ def window( Returns: The current builder instance for method chaining. """ - if self._expression is None: - self._expression = exp.Select() - if not isinstance(self._expression, exp.Select): + current_expr = self.get_expression() + if current_expr is None: + self.set_expression(exp.Select()) + current_expr = self.get_expression() + + if not isinstance(current_expr, exp.Select): msg = "Cannot add window function to a non-SELECT expression." raise SQLBuilderError(msg) @@ -525,7 +534,8 @@ def window( over_args["frame"] = frame_expr window_expr = exp.Window(this=func_expr_parsed, **over_args) - self._expression.select(exp.alias_(window_expr, alias) if alias else window_expr, copy=False) + current_expr = current_expr.select(exp.alias_(window_expr, alias) if alias else window_expr, copy=False) + self.set_expression(current_expr) return self def case_(self, alias: "Optional[str]" = None) -> "CaseBuilder": @@ -906,3 +916,21 @@ def as_(self, alias: str) -> exp.Alias: """ case_expr = exp.Case(ifs=self._conditions, default=self._default) return cast("exp.Alias", exp.alias_(case_expr, alias)) + + @property + def conditions(self) -> "list[exp.If]": + """Get CASE conditions (public API). + + Returns: + List of If expressions representing WHEN clauses + """ + return self._conditions + + @property + def default(self) -> Optional[exp.Expression]: + """Get CASE default value (public API). 
+ + Returns: + Default expression for the ELSE clause, or None + """ + return self._default diff --git a/sqlspec/builder/mixins/_update_operations.py b/sqlspec/builder/mixins/_update_operations.py index e110ffcd2..1dfe0bf8e 100644 --- a/sqlspec/builder/mixins/_update_operations.py +++ b/sqlspec/builder/mixins/_update_operations.py @@ -1,3 +1,4 @@ +# pyright: reportPrivateUsage=false """UPDATE operation mixins. Provides mixins for UPDATE statement functionality including @@ -25,8 +26,9 @@ class UpdateTableClauseMixin: __slots__ = () - # Type annotation for PyRight - this will be provided by the base class - _expression: Optional[exp.Expression] + # Type annotations for PyRight - these will be provided by the base class + def get_expression(self) -> Optional[exp.Expression]: ... + def set_expression(self, expression: exp.Expression) -> None: ... def table(self, table_name: str, alias: Optional[str] = None) -> Self: """Set the table to update. @@ -38,10 +40,14 @@ def table(self, table_name: str, alias: Optional[str] = None) -> Self: Returns: The current builder instance for method chaining. """ - if self._expression is None or not isinstance(self._expression, exp.Update): - self._expression = exp.Update(this=None, expressions=[], joins=[]) + current_expr = self.get_expression() + if current_expr is None or not isinstance(current_expr, exp.Update): + self.set_expression(exp.Update(this=None, expressions=[], joins=[])) + current_expr = self.get_expression() + + assert current_expr is not None table_expr: exp.Expression = exp.to_table(table_name, alias=alias) - self._expression.set("this", table_expr) + current_expr.set("this", table_expr) setattr(self, "_table", table_name) return self @@ -52,8 +58,9 @@ class UpdateSetClauseMixin: __slots__ = () - # Type annotation for PyRight - this will be provided by the base class - _expression: Optional[exp.Expression] + # Type annotations for PyRight - these will be provided by the base class + def get_expression(self) -> Optional[exp.Expression]: ... + def set_expression(self, expression: exp.Expression) -> None: ... def add_parameter(self, value: Any, name: Optional[str] = None) -> tuple[Any, str]: """Add parameter - provided by QueryBuilder.""" @@ -130,9 +137,12 @@ def set(self, *args: Any, **kwargs: Any) -> Self: Returns: The current builder instance for method chaining. """ - if self._expression is None: - self._expression = exp.Update() - if not isinstance(self._expression, exp.Update): + current_expr = self.get_expression() + if current_expr is None: + self.set_expression(exp.Update()) + current_expr = self.get_expression() + + if not isinstance(current_expr, exp.Update): msg = "Cannot add SET clause to non-UPDATE expression." raise SQLBuilderError(msg) assignments = [] @@ -149,8 +159,8 @@ def set(self, *args: Any, **kwargs: Any) -> Self: else: msg = "Invalid arguments for set(): use (column, value), mapping, or kwargs." raise SQLBuilderError(msg) - existing = self._expression.args.get("expressions", []) - self._expression.set("expressions", existing + assignments) + existing = current_expr.args.get("expressions", []) + current_expr.set("expressions", existing + assignments) return self @@ -160,8 +170,9 @@ class UpdateFromClauseMixin: __slots__ = () - # Type annotation for PyRight - this will be provided by the base class - _expression: Optional[exp.Expression] + # Type annotations for PyRight - these will be provided by the base class + def get_expression(self) -> Optional[exp.Expression]: ... 
+ def set_expression(self, expression: exp.Expression) -> None: ... def from_(self, table: Union[str, exp.Expression, Any], alias: Optional[str] = None) -> Self: """Add a FROM clause to the UPDATE statement. @@ -176,7 +187,8 @@ def from_(self, table: Union[str, exp.Expression, Any], alias: Optional[str] = N Raises: SQLBuilderError: If the current expression is not an UPDATE statement. """ - if self._expression is None or not isinstance(self._expression, exp.Update): + current_expr = self.get_expression() + if current_expr is None or not isinstance(current_expr, exp.Update): msg = "Cannot add FROM clause to non-UPDATE expression. Set the main table first." raise SQLBuilderError(msg) table_expr: exp.Expression @@ -194,9 +206,9 @@ def from_(self, table: Union[str, exp.Expression, Any], alias: Optional[str] = N else: msg = f"Unsupported table type for FROM clause: {type(table)}" raise SQLBuilderError(msg) - if self._expression.args.get("from") is None: - self._expression.set("from", exp.From(expressions=[])) - from_clause = self._expression.args["from"] + if current_expr.args.get("from") is None: + current_expr.set("from", exp.From(expressions=[])) + from_clause = current_expr.args["from"] if hasattr(from_clause, "append"): from_clause.append("expressions", table_expr) else: diff --git a/sqlspec/builder/mixins/_where_clause.py b/sqlspec/builder/mixins/_where_clause.py index 6655f347c..13dffe43b 100644 --- a/sqlspec/builder/mixins/_where_clause.py +++ b/sqlspec/builder/mixins/_where_clause.py @@ -1,4 +1,5 @@ # ruff: noqa: PLR2004 +# pyright: reportPrivateUsage=false, reportPrivateImportUsage=false """WHERE and HAVING clause mixins. Provides mixins for WHERE and HAVING clause functionality with @@ -14,7 +15,7 @@ from sqlglot import exp from typing_extensions import Self -from sqlspec.builder._parsing_utils import parse_column_expression, parse_condition_expression +from sqlspec.builder._parsing_utils import extract_column_name, parse_column_expression, parse_condition_expression from sqlspec.exceptions import SQLBuilderError from sqlspec.utils.type_guards import ( has_expression_and_parameters, @@ -24,31 +25,6 @@ is_iterable_parameters, ) - -def _extract_column_name(column: Union[str, exp.Column]) -> str: - """Extract column name from column expression for parameter naming. - - Args: - column: Column expression (string or SQLGlot Column) - - Returns: - Column name as string for use as parameter name - """ - if isinstance(column, str): - # Handle simple column names and table.column references - if "." in column: - return column.split(".")[-1] # Return just the column part - return column - if isinstance(column, exp.Column): - # Extract the column name from SQLGlot Column expression - try: - return str(column.this.this) - except AttributeError: - return str(column.this) if column.this else "column" - # Fallback for any unexpected types (defensive programming) - return "column" - - if TYPE_CHECKING: from sqlspec.builder._column import ColumnExpression from sqlspec.protocols import SQLBuilderProtocol @@ -62,8 +38,9 @@ class WhereClauseMixin: __slots__ = () - # Type annotation for PyRight - this will be provided by the base class - _expression: Optional[exp.Expression] + # Type annotations for PyRight - these will be provided by the base class + def get_expression(self) -> Optional[exp.Expression]: ... + def set_expression(self, expression: exp.Expression) -> None: ... 
def _create_parameterized_condition( self, @@ -82,7 +59,7 @@ def _create_parameterized_condition( The created condition expression """ builder = cast("SQLBuilderProtocol", self) - column_name = _extract_column_name(column) + column_name = extract_column_name(column) param_name = builder._generate_unique_parameter_name(column_name) _, param_name = builder.add_parameter(value, name=param_name) col_expr = parse_column_expression(column) if not isinstance(column, exp.Column) else column @@ -118,18 +95,20 @@ def _apply_or_where(self, where_method: "Callable[..., Self]", *args: Any, **kwa Self with OR condition applied """ # Create a temporary clone to capture the condition - original_expr = self._expression + original_expr = self.get_expression() # Apply the where method to get the condition where_method(*args, **kwargs) # Get the last condition added by extracting it from the modified expression - if isinstance(self._expression, (exp.Select, exp.Update, exp.Delete)) and original_expr != self._expression: - last_where = self._expression.find(exp.Where) + current_expr = self.get_expression() + if isinstance(current_expr, (exp.Select, exp.Update, exp.Delete)) and original_expr != current_expr: + last_where = current_expr.find(exp.Where) if last_where and last_where.this: condition = last_where.this # Restore original expression - self._expression = original_expr + if original_expr is not None: + self.set_expression(original_expr) # Apply as OR return self.or_where(condition) @@ -236,7 +215,7 @@ def _process_tuple_condition(self, condition: "tuple[Any, ...]") -> exp.Expressi column_name_raw, operator, value = condition operator = str(operator).upper() column_exp = parse_column_expression(column_name_raw) - column_name = _extract_column_name(column_name_raw) + column_name = extract_column_name(column_name_raw) # Simple operators that use direct parameterization simple_operators = { @@ -299,16 +278,17 @@ def where( Returns: The current builder instance for method chaining. """ - if self.__class__.__name__ == "Update" and not isinstance(self._expression, exp.Update): + current_expr = self.get_expression() + if self.__class__.__name__ == "Update" and not isinstance(current_expr, exp.Update): msg = "Cannot add WHERE clause to non-UPDATE expression" raise SQLBuilderError(msg) builder = cast("SQLBuilderProtocol", self) - if builder._expression is None: + if current_expr is None: msg = "Cannot add WHERE clause: expression is not initialized." raise SQLBuilderError(msg) - if isinstance(builder._expression, exp.Delete) and not builder._expression.args.get("this"): + if isinstance(current_expr, exp.Delete) and not current_expr.args.get("this"): msg = "WHERE clause requires a table to be set. Use from() to set the table first." 
raise SQLBuilderError(msg) @@ -357,10 +337,11 @@ def where( else: where_expr = self._process_tuple_condition((condition, values[0])) # Process this condition and skip the rest - if isinstance(builder._expression, (exp.Select, exp.Update, exp.Delete)): - builder._expression = builder._expression.where(where_expr, copy=False) + if isinstance(current_expr, (exp.Select, exp.Update, exp.Delete)): + updated_expr = current_expr.where(where_expr, copy=False) + self.set_expression(updated_expr) else: - msg = f"WHERE clause not supported for {type(builder._expression).__name__}" + msg = f"WHERE clause not supported for {type(current_expr).__name__}" raise SQLBuilderError(msg) return self else: @@ -400,10 +381,11 @@ def where( msg = f"Unsupported condition type: {type(condition).__name__}" raise SQLBuilderError(msg) - if isinstance(builder._expression, (exp.Select, exp.Update, exp.Delete)): - builder._expression = builder._expression.where(where_expr, copy=False) + if isinstance(current_expr, (exp.Select, exp.Update, exp.Delete)): + updated_expr = current_expr.where(where_expr, copy=False) + self.set_expression(updated_expr) else: - msg = f"WHERE clause not supported for {type(builder._expression).__name__}" + msg = f"WHERE clause not supported for {type(current_expr).__name__}" raise SQLBuilderError(msg) return self @@ -448,7 +430,7 @@ def where_gte(self, column: Union[str, exp.Column], value: Any) -> Self: def where_between(self, column: Union[str, exp.Column], low: Any, high: Any) -> Self: """Add WHERE column BETWEEN low AND high clause.""" builder = cast("SQLBuilderProtocol", self) - column_name = _extract_column_name(column) + column_name = extract_column_name(column) low_param = builder._generate_unique_parameter_name(f"{column_name}_low") high_param = builder._generate_unique_parameter_name(f"{column_name}_high") _, low_param = builder.add_parameter(low, name=low_param) @@ -460,7 +442,7 @@ def where_between(self, column: Union[str, exp.Column], low: Any, high: Any) -> def where_like(self, column: Union[str, exp.Column], pattern: str, escape: Optional[str] = None) -> Self: """Add WHERE column LIKE pattern clause.""" builder = cast("SQLBuilderProtocol", self) - column_name = _extract_column_name(column) + column_name = extract_column_name(column) param_name = builder._generate_unique_parameter_name(column_name) _, param_name = builder.add_parameter(pattern, name=param_name) col_expr = parse_column_expression(column) if not isinstance(column, exp.Column) else column @@ -519,7 +501,7 @@ def where_in(self, column: Union[str, exp.Column], values: Any) -> Self: if not is_iterable_parameters(values) or isinstance(values, (str, bytes)): msg = "Unsupported type for 'values' in WHERE IN" raise SQLBuilderError(msg) - column_name = _extract_column_name(column) + column_name = extract_column_name(column) parameters = [] for i, v in enumerate(values): if len(values) == 1: @@ -548,7 +530,7 @@ def where_not_in(self, column: Union[str, exp.Column], values: Any) -> Self: if not is_iterable_parameters(values) or isinstance(values, (str, bytes)): msg = "Values for where_not_in must be a non-string iterable or subquery." 
raise SQLBuilderError(msg) - column_name = _extract_column_name(column) + column_name = extract_column_name(column) parameters = [] for i, v in enumerate(values): if len(values) == 1: @@ -638,7 +620,7 @@ def where_any(self, column: Union[str, exp.Column], values: Any) -> Self: if not is_iterable_parameters(values) or isinstance(values, bytes): msg = "Unsupported type for 'values' in WHERE ANY" raise SQLBuilderError(msg) - column_name = _extract_column_name(column) + column_name = extract_column_name(column) parameters = [] for i, v in enumerate(values): if len(values) == 1: @@ -678,7 +660,7 @@ def where_not_any(self, column: Union[str, exp.Column], values: Any) -> Self: if not is_iterable_parameters(values) or isinstance(values, bytes): msg = "Unsupported type for 'values' in WHERE NOT ANY" raise SQLBuilderError(msg) - column_name = _extract_column_name(column) + column_name = extract_column_name(column) parameters = [] for i, v in enumerate(values): if len(values) == 1: @@ -975,7 +957,7 @@ def or_where_in(self, column: Union[str, exp.Column], values: Any) -> Self: if not is_iterable_parameters(values) or isinstance(values, (str, bytes)): msg = "Unsupported type for 'values' in OR WHERE IN" raise SQLBuilderError(msg) - column_name = _extract_column_name(column) + column_name = extract_column_name(column) parameters = [] for i, v in enumerate(values): if len(values) == 1: @@ -990,7 +972,7 @@ def or_where_in(self, column: Union[str, exp.Column], values: Any) -> Self: def or_where_like(self, column: Union[str, exp.Column], pattern: str, escape: Optional[str] = None) -> Self: """Add OR column LIKE pattern clause.""" builder = cast("SQLBuilderProtocol", self) - column_name = _extract_column_name(column) + column_name = extract_column_name(column) param_name = builder._generate_unique_parameter_name(column_name) _, param_name = builder.add_parameter(pattern, name=param_name) col_expr = parse_column_expression(column) if not isinstance(column, exp.Column) else column @@ -1044,7 +1026,7 @@ def or_where_gte(self, column: Union[str, exp.Column], value: Any) -> Self: def or_where_between(self, column: Union[str, exp.Column], low: Any, high: Any) -> Self: """Add OR column BETWEEN low AND high clause.""" builder = cast("SQLBuilderProtocol", self) - column_name = _extract_column_name(column) + column_name = extract_column_name(column) low_param = builder._generate_unique_parameter_name(f"{column_name}_low") high_param = builder._generate_unique_parameter_name(f"{column_name}_high") _, low_param = builder.add_parameter(low, name=low_param) @@ -1092,7 +1074,7 @@ def or_where_not_in(self, column: Union[str, exp.Column], values: Any) -> Self: if not is_iterable_parameters(values) or isinstance(values, (str, bytes)): msg = "Values for or_where_not_in must be a non-string iterable or subquery." 
raise SQLBuilderError(msg) - column_name = _extract_column_name(column) + column_name = extract_column_name(column) parameters = [] for i, v in enumerate(values): if len(values) == 1: @@ -1216,7 +1198,7 @@ def or_where_any(self, column: Union[str, exp.Column], values: Any) -> Self: if not is_iterable_parameters(values) or isinstance(values, bytes): msg = "Unsupported type for 'values' in OR WHERE ANY" raise SQLBuilderError(msg) - column_name = _extract_column_name(column) + column_name = extract_column_name(column) parameters = [] for i, v in enumerate(values): if len(values) == 1: @@ -1272,7 +1254,7 @@ def or_where_not_any(self, column: Union[str, exp.Column], values: Any) -> Self: if not is_iterable_parameters(values) or isinstance(values, bytes): msg = "Unsupported type for 'values' in OR WHERE NOT ANY" raise SQLBuilderError(msg) - column_name = _extract_column_name(column) + column_name = extract_column_name(column) parameters = [] for i, v in enumerate(values): if len(values) == 1: @@ -1292,7 +1274,9 @@ class HavingClauseMixin: __slots__ = () - _expression: Optional[exp.Expression] + # Type annotations for PyRight - these will be provided by the base class + def get_expression(self) -> Optional[exp.Expression]: ... + def set_expression(self, expression: exp.Expression) -> None: ... def having(self, condition: Union[str, exp.Expression]) -> Self: """Add HAVING clause. @@ -1306,11 +1290,15 @@ def having(self, condition: Union[str, exp.Expression]) -> Self: Returns: The current builder instance for method chaining. """ - if self._expression is None: - self._expression = exp.Select() - if not isinstance(self._expression, exp.Select): + current_expr = self.get_expression() + if current_expr is None: + self.set_expression(exp.Select()) + current_expr = self.get_expression() + + if not isinstance(current_expr, exp.Select): msg = "Cannot add HAVING to a non-SELECT expression." 
raise SQLBuilderError(msg) having_expr = exp.condition(condition) if isinstance(condition, str) else condition - self._expression = self._expression.having(having_expr, copy=False) + updated_expr = current_expr.having(having_expr, copy=False) + self.set_expression(updated_expr) return self diff --git a/sqlspec/core/__init__.py b/sqlspec/core/__init__.py index 1036f1510..b5408197f 100644 --- a/sqlspec/core/__init__.py +++ b/sqlspec/core/__init__.py @@ -90,7 +90,7 @@ """ from sqlspec.core import filters -from sqlspec.core.cache import CacheConfig, CacheStats, UnifiedCache, get_statement_cache +from sqlspec.core.cache import CacheConfig, CacheStats, MultiLevelCache, UnifiedCache, get_cache from sqlspec.core.compiler import OperationType, SQLProcessor from sqlspec.core.filters import StatementFilter from sqlspec.core.hashing import ( @@ -115,6 +115,7 @@ "ArrowResult", "CacheConfig", "CacheStats", + "MultiLevelCache", "OperationType", "ParameterConverter", "ParameterProcessor", @@ -129,7 +130,7 @@ "TypedParameter", "UnifiedCache", "filters", - "get_statement_cache", + "get_cache", "hash_expression", "hash_expression_node", "hash_optimized_expression", diff --git a/sqlspec/core/cache.py b/sqlspec/core/cache.py index 94565445f..c188e8db9 100644 --- a/sqlspec/core/cache.py +++ b/sqlspec/core/cache.py @@ -13,6 +13,7 @@ import threading import time +from dataclasses import dataclass from typing import TYPE_CHECKING, Any, Final, Optional from mypy_extensions import mypyc_attr @@ -21,23 +22,24 @@ from sqlspec.utils.logging import get_logger if TYPE_CHECKING: + from collections.abc import Iterator + import sqlglot.expressions as exp - from sqlspec.core.statement import SQL __all__ = ( "CacheKey", "CacheStats", - "ExpressionCache", - "ParameterCache", - "StatementCache", + "CachedStatement", + "FiltersView", + "MultiLevelCache", + "ParametersView", "UnifiedCache", + "canonicalize_filters", + "create_cache_key", + "get_cache", "get_cache_config", "get_default_cache", - "get_expression_cache", - "get_parameter_cache", - "get_statement_cache", - "sql_cache", ) T = TypeVar("T") @@ -339,202 +341,7 @@ def __contains__(self, key: CacheKey) -> bool: return not (ttl is not None and time.time() - node.timestamp > ttl) -@mypyc_attr(allow_interpreted_subclasses=False) -class StatementCache: - """Cache for compiled SQL statements.""" - - def __init__(self, max_size: int = DEFAULT_MAX_SIZE) -> None: - """Initialize statement cache. - - Args: - max_size: Maximum number of statements to cache - """ - self._cache: UnifiedCache = UnifiedCache(max_size) - - def get_compiled(self, statement: "SQL") -> Optional[tuple[str, Any]]: - """Get compiled SQL and parameters from cache. - - Args: - statement: SQL statement to lookup - - Returns: - Tuple of (compiled_sql, parameters) or None if not found - """ - cache_key = self._create_statement_key(statement) - return self._cache.get(cache_key) - - def put_compiled(self, statement: "SQL", compiled_sql: str, parameters: Any) -> None: - """Cache compiled SQL and parameters. - - Args: - statement: Original SQL statement - compiled_sql: Compiled SQL string - parameters: Processed parameters - """ - cache_key = self._create_statement_key(statement) - self._cache.put(cache_key, (compiled_sql, parameters)) - - def _create_statement_key(self, statement: "SQL") -> CacheKey: - """Create cache key for SQL statement. 
- - Args: - statement: SQL statement - - Returns: - Cache key for the statement - """ - - key_data = ( - "statement", - statement._raw_sql, - hash(statement), - str(statement.dialect) if statement.dialect else None, - statement.is_many, - statement.is_script, - ) - return CacheKey(key_data) - - def clear(self) -> None: - """Clear statement cache.""" - self._cache.clear() - - def get_stats(self) -> CacheStats: - """Get cache statistics.""" - return self._cache.get_stats() - - -@mypyc_attr(allow_interpreted_subclasses=False) -class ExpressionCache: - """Cache for parsed expressions.""" - - def __init__(self, max_size: int = DEFAULT_MAX_SIZE) -> None: - """Initialize expression cache. - - Args: - max_size: Maximum number of expressions to cache - """ - self._cache: UnifiedCache = UnifiedCache(max_size) - - def get_expression(self, sql: str, dialect: Optional[str] = None) -> "Optional[exp.Expression]": - """Get parsed expression from cache. - - Args: - sql: SQL string - dialect: SQL dialect - - Returns: - Parsed expression or None if not found - """ - cache_key = self._create_expression_key(sql, dialect) - return self._cache.get(cache_key) - - def put_expression(self, sql: str, expression: "exp.Expression", dialect: Optional[str] = None) -> None: - """Cache parsed expression. - - Args: - sql: SQL string - expression: Parsed SQLGlot expression - dialect: SQL dialect - """ - cache_key = self._create_expression_key(sql, dialect) - self._cache.put(cache_key, expression) - - def _create_expression_key(self, sql: str, dialect: Optional[str]) -> CacheKey: - """Create cache key for expression. - - Args: - sql: SQL string - dialect: SQL dialect - - Returns: - Cache key for the expression - """ - key_data = ("expression", sql, dialect) - return CacheKey(key_data) - - def clear(self) -> None: - """Clear expression cache.""" - self._cache.clear() - - def get_stats(self) -> CacheStats: - """Get cache statistics.""" - return self._cache.get_stats() - - -@mypyc_attr(allow_interpreted_subclasses=False) -class ParameterCache: - """Cache for processed parameters.""" - - def __init__(self, max_size: int = DEFAULT_MAX_SIZE) -> None: - """Initialize parameter cache. - - Args: - max_size: Maximum number of parameter sets to cache - """ - self._cache: UnifiedCache = UnifiedCache(max_size) - - def get_parameters(self, original_params: Any, config_hash: int) -> Optional[Any]: - """Get processed parameters from cache. - - Args: - original_params: Original parameters - config_hash: Hash of parameter processing configuration - - Returns: - Processed parameters or None if not found - """ - cache_key = self._create_parameter_key(original_params, config_hash) - return self._cache.get(cache_key) - - def put_parameters(self, original_params: Any, processed_params: Any, config_hash: int) -> None: - """Cache processed parameters. - - Args: - original_params: Original parameters - processed_params: Processed parameters - config_hash: Hash of parameter processing configuration - """ - cache_key = self._create_parameter_key(original_params, config_hash) - self._cache.put(cache_key, processed_params) - - def _create_parameter_key(self, params: Any, config_hash: int) -> CacheKey: - """Create cache key for parameters. - - Args: - params: Parameters to cache - config_hash: Configuration hash - - Returns: - Cache key for the parameters - """ - - try: - param_key: tuple[Any, ...] 
- if isinstance(params, dict): - param_key = tuple(sorted(params.items())) - elif isinstance(params, (list, tuple)): - param_key = tuple(params) - else: - param_key = (params,) - - return CacheKey(("parameters", param_key, config_hash)) - except (TypeError, ValueError): - param_key_fallback = (str(params), type(params).__name__) - return CacheKey(("parameters", param_key_fallback, config_hash)) - - def clear(self) -> None: - """Clear parameter cache.""" - self._cache.clear() - - def get_stats(self) -> CacheStats: - """Get cache statistics.""" - return self._cache.get_stats() - - _default_cache: Optional[UnifiedCache] = None -_statement_cache: Optional[StatementCache] = None -_expression_cache: Optional[ExpressionCache] = None -_parameter_cache: Optional[ParameterCache] = None _cache_lock = threading.Lock() @@ -552,58 +359,12 @@ def get_default_cache() -> UnifiedCache: return _default_cache -def get_statement_cache() -> StatementCache: - """Get the statement cache instance. - - Returns: - Singleton statement cache instance - """ - global _statement_cache - if _statement_cache is None: - with _cache_lock: - if _statement_cache is None: - _statement_cache = StatementCache() - return _statement_cache - - -def get_expression_cache() -> ExpressionCache: - """Get the expression cache instance. - - Returns: - Singleton expression cache instance - """ - global _expression_cache - if _expression_cache is None: - with _cache_lock: - if _expression_cache is None: - _expression_cache = ExpressionCache() - return _expression_cache - - -def get_parameter_cache() -> ParameterCache: - """Get the parameter cache instance. - - Returns: - Singleton parameter cache instance - """ - global _parameter_cache - if _parameter_cache is None: - with _cache_lock: - if _parameter_cache is None: - _parameter_cache = ParameterCache() - return _parameter_cache - - def clear_all_caches() -> None: """Clear all cache instances.""" if _default_cache is not None: _default_cache.clear() - if _statement_cache is not None: - _statement_cache.clear() - if _expression_cache is not None: - _expression_cache.clear() - if _parameter_cache is not None: - _parameter_cache.clear() + cache = get_cache() + cache.clear() def get_cache_statistics() -> dict[str, CacheStats]: @@ -615,12 +376,8 @@ def get_cache_statistics() -> dict[str, CacheStats]: stats = {} if _default_cache is not None: stats["default"] = _default_cache.get_stats() - if _statement_cache is not None: - stats["statement"] = _statement_cache.get_stats() - if _expression_cache is not None: - stats["expression"] = _expression_cache.get_stats() - if _parameter_cache is not None: - stats["parameter"] = _parameter_cache.get_stats() + cache = get_cache() + stats["multi_level"] = cache.get_stats() return stats @@ -690,8 +447,8 @@ def update_cache_config(config: CacheConfig) -> None: unified_cache = get_default_cache() unified_cache.clear() - statement_cache = get_statement_cache() - statement_cache.clear() + cache = get_cache() + cache.clear() logger = get_logger("sqlspec.cache") logger.info( @@ -705,87 +462,13 @@ def update_cache_config(config: CacheConfig) -> None: ) -@mypyc_attr(allow_interpreted_subclasses=False) -class CacheStatsAggregate: - """Cache statistics from all cache instances.""" - - __slots__ = ( - "fragment_capacity", - "fragment_hit_rate", - "fragment_hits", - "fragment_misses", - "fragment_size", - "optimized_capacity", - "optimized_hit_rate", - "optimized_hits", - "optimized_misses", - "optimized_size", - "sql_capacity", - "sql_hit_rate", - "sql_hits", - 
"sql_misses", - "sql_size", - ) - - def __init__(self) -> None: - """Initialize cache statistics.""" - self.sql_hit_rate = 0.0 - self.fragment_hit_rate = 0.0 - self.optimized_hit_rate = 0.0 - self.sql_size = 0 - self.fragment_size = 0 - self.optimized_size = 0 - self.sql_capacity = 0 - self.fragment_capacity = 0 - self.optimized_capacity = 0 - self.sql_hits = 0 - self.sql_misses = 0 - self.fragment_hits = 0 - self.fragment_misses = 0 - self.optimized_hits = 0 - self.optimized_misses = 0 - - -def get_cache_stats() -> CacheStatsAggregate: +def get_cache_stats() -> dict[str, CacheStats]: """Get cache statistics from all caches. Returns: - Cache statistics object + Dictionary of cache statistics """ - stats_dict = get_cache_statistics() - stats = CacheStatsAggregate() - - for cache_name, cache_stats in stats_dict.items(): - hits = cache_stats.hits - misses = cache_stats.misses - size = 0 - - if "sql" in cache_name.lower(): - stats.sql_hits += hits - stats.sql_misses += misses - stats.sql_size += size - elif "fragment" in cache_name.lower(): - stats.fragment_hits += hits - stats.fragment_misses += misses - stats.fragment_size += size - elif "optimized" in cache_name.lower(): - stats.optimized_hits += hits - stats.optimized_misses += misses - stats.optimized_size += size - - sql_total = stats.sql_hits + stats.sql_misses - if sql_total > 0: - stats.sql_hit_rate = stats.sql_hits / sql_total - - fragment_total = stats.fragment_hits + stats.fragment_misses - if fragment_total > 0: - stats.fragment_hit_rate = stats.fragment_hits / fragment_total - - optimized_total = stats.optimized_hits + stats.optimized_misses - if optimized_total > 0: - stats.optimized_hit_rate = stats.optimized_hits / optimized_total - - return stats + return get_cache_statistics() def reset_cache_stats() -> None: @@ -801,24 +484,287 @@ def log_cache_stats() -> None: @mypyc_attr(allow_interpreted_subclasses=False) -class SQLCompilationCache: - """Wrapper around StatementCache for compatibility.""" +class ParametersView: + """Read-only view of parameters without copying. - __slots__ = ("_statement_cache", "_unified_cache") + Provides read-only access to parameters without making copies, + enabling zero-copy parameter access patterns. + """ - def __init__(self) -> None: - self._statement_cache = get_statement_cache() - self._unified_cache = get_default_cache() + __slots__ = ("_named_ref", "_positional_ref") - def get(self, cache_key: str) -> Optional[tuple[str, Any]]: - """Get cached compiled SQL and parameters.""" - key = CacheKey((cache_key,)) - return self._unified_cache.get(key) + def __init__(self, positional: list[Any], named: dict[str, Any]) -> None: + """Initialize parameters view. - def set(self, cache_key: str, value: tuple[str, Any]) -> None: - """Set cached compiled SQL and parameters.""" - key = CacheKey((cache_key,)) - self._unified_cache.put(key, value) + Args: + positional: List of positional parameters (will be referenced, not copied) + named: Dictionary of named parameters (will be referenced, not copied) + """ + self._positional_ref = positional + self._named_ref = named + def get_positional(self, index: int) -> Any: + """Get positional parameter by index. -sql_cache = SQLCompilationCache() + Args: + index: Parameter index + + Returns: + Parameter value + """ + return self._positional_ref[index] + + def get_named(self, key: str) -> Any: + """Get named parameter by key. 
+ + Args: + key: Parameter name + + Returns: + Parameter value + """ + return self._named_ref[key] + + def has_named(self, key: str) -> bool: + """Check if named parameter exists. + + Args: + key: Parameter name + + Returns: + True if parameter exists + """ + return key in self._named_ref + + @property + def positional_count(self) -> int: + """Number of positional parameters.""" + return len(self._positional_ref) + + @property + def named_count(self) -> int: + """Number of named parameters.""" + return len(self._named_ref) + + +@mypyc_attr(allow_interpreted_subclasses=False) +@dataclass(frozen=True) +class CachedStatement: + """Immutable cached statement result. + + This class stores compiled SQL and parameters in an immutable format + that can be safely shared between different parts of the system without + risk of mutation. Tuple parameters ensure no copying is needed. + """ + + compiled_sql: str + parameters: Optional[tuple[Any, ...]] # None allowed for static script compilation + expression: Optional["exp.Expression"] + + def get_parameters_view(self) -> "ParametersView": + """Get read-only parameter view. + + Returns: + View object that provides read-only access to parameters + """ + if self.parameters is None: + return ParametersView([], {}) + return ParametersView(list(self.parameters), {}) + + +def create_cache_key(level: str, key: str, dialect: Optional[str] = None) -> str: + """Create optimized cache key using string concatenation. + + Args: + level: Cache level (statement, expression, parameter) + key: Base cache key + dialect: SQL dialect (optional) + + Returns: + Optimized cache key string + """ + return f"{level}:{dialect or 'default'}:{key}" + + +@mypyc_attr(allow_interpreted_subclasses=False) +class MultiLevelCache: + """Single cache with namespace isolation - no connection pool complexity.""" + + __slots__ = ("_cache",) + + def __init__(self, max_size: int = DEFAULT_MAX_SIZE, ttl_seconds: Optional[int] = DEFAULT_TTL_SECONDS) -> None: + """Initialize multi-level cache. + + Args: + max_size: Maximum number of cache entries + ttl_seconds: Time-to-live in seconds (None for no expiration) + """ + self._cache = UnifiedCache(max_size, ttl_seconds) + + def get(self, level: str, key: str, dialect: Optional[str] = None) -> Optional[Any]: + """Get value from cache with level and dialect namespace. + + Args: + level: Cache level (e.g., "statement", "expression", "parameter") + key: Cache key + dialect: SQL dialect (optional) + + Returns: + Cached value or None if not found + """ + full_key = create_cache_key(level, key, dialect) + cache_key = CacheKey((full_key,)) + return self._cache.get(cache_key) + + def put(self, level: str, key: str, value: Any, dialect: Optional[str] = None) -> None: + """Put value in cache with level and dialect namespace. + + Args: + level: Cache level (e.g., "statement", "expression", "parameter") + key: Cache key + value: Value to cache + dialect: SQL dialect (optional) + """ + full_key = create_cache_key(level, key, dialect) + cache_key = CacheKey((full_key,)) + self._cache.put(cache_key, value) + + def delete(self, level: str, key: str, dialect: Optional[str] = None) -> bool: + """Delete entry from cache. 
+ + Args: + level: Cache level + key: Cache key to delete + dialect: SQL dialect (optional) + + Returns: + True if key was found and deleted, False otherwise + """ + full_key = create_cache_key(level, key, dialect) + cache_key = CacheKey((full_key,)) + return self._cache.delete(cache_key) + + def clear(self) -> None: + """Clear all cache entries.""" + self._cache.clear() + + def get_stats(self) -> CacheStats: + """Get cache statistics.""" + return self._cache.get_stats() + + +_multi_level_cache: Optional[MultiLevelCache] = None + + +def get_cache() -> MultiLevelCache: + """Get the multi-level cache instance. + + Returns: + Singleton multi-level cache instance + """ + global _multi_level_cache + if _multi_level_cache is None: + with _cache_lock: + if _multi_level_cache is None: + _multi_level_cache = MultiLevelCache() + return _multi_level_cache + + +@dataclass(frozen=True) +class Filter: + """Immutable filter that can be safely shared.""" + + field_name: str + operation: str + value: Any + + def __post_init__(self) -> None: + """Validate filter parameters.""" + if not self.field_name: + msg = "Field name cannot be empty" + raise ValueError(msg) + if not self.operation: + msg = "Operation cannot be empty" + raise ValueError(msg) + + +def canonicalize_filters(filters: "list[Filter]") -> "tuple[Filter, ...]": + """Create canonical representation of filters for cache keys. + + Args: + filters: List of filters to canonicalize + + Returns: + Tuple of unique filters sorted by field_name, operation, then value + """ + if not filters: + return () + + # Deduplicate and sort for canonical representation + unique_filters = set(filters) + return tuple(sorted(unique_filters, key=lambda f: (f.field_name, f.operation, str(f.value)))) + + +@mypyc_attr(allow_interpreted_subclasses=False) +class FiltersView: + """Read-only view of filters without copying. + + Provides zero-copy access to filters with methods for querying, + iteration, and canonical representation generation. + """ + + __slots__ = ("_filters_ref",) + + def __init__(self, filters: "list[Any]") -> None: + """Initialize filters view. + + Args: + filters: List of filters (will be referenced, not copied) + """ + self._filters_ref = filters + + def __len__(self) -> int: + """Get number of filters.""" + return len(self._filters_ref) + + def __iter__(self) -> "Iterator[Any]": + """Iterate over filters.""" + return iter(self._filters_ref) + + def get_by_field(self, field_name: str) -> "list[Any]": + """Get all filters for a specific field. + + Args: + field_name: Field name to filter by + + Returns: + List of filters matching the field name + """ + return [f for f in self._filters_ref if hasattr(f, "field_name") and f.field_name == field_name] + + def has_field(self, field_name: str) -> bool: + """Check if any filter exists for a field. + + Args: + field_name: Field name to check + + Returns: + True if field has filters + """ + return any(hasattr(f, "field_name") and f.field_name == field_name for f in self._filters_ref) + + def to_canonical(self) -> "tuple[Any, ...]": + """Create canonical representation for cache keys. 
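Taken together, create_cache_key, MultiLevelCache, get_cache, CachedStatement, Filter, and canonicalize_filters replace the three dedicated caches with a single namespaced store. A short usage sketch; the SQL text, key, and filter values are made up for illustration.

from sqlspec.core.cache import (
    CachedStatement,
    Filter,  # defined in this module, though not re-exported via __all__
    canonicalize_filters,
    create_cache_key,
    get_cache,
)

cache = get_cache()  # singleton MultiLevelCache

# Canonical filters make equivalent filter sets hash to the same key.
filters = canonicalize_filters([
    Filter("status", "eq", "active"),
    Filter("status", "eq", "active"),  # duplicate collapses in the set
    Filter("age", "gt", 21),
])
key = str(hash(("SELECT * FROM users", filters)))
print(create_cache_key("statement", key, "postgres"))  # 'statement:postgres:<key>'

# Store a compiled result under the "statement" namespace for one dialect.
cache.put(
    "statement",
    key,
    CachedStatement(
        compiled_sql="SELECT * FROM users WHERE status = ? AND age > ?",
        parameters=("active", 21),
        expression=None,
    ),
    dialect="postgres",
)

cache.get("statement", key, dialect="postgres")  # returns the CachedStatement
cache.get("statement", key, dialect="duckdb")    # None: dialect is part of the namespace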
+ + Returns: + Canonical tuple representation of filters + """ + # Convert to Filter objects if needed, then canonicalize + filter_objects = [] + for f in self._filters_ref: + if isinstance(f, Filter): + filter_objects.append(f) + elif hasattr(f, "field_name") and hasattr(f, "operation") and hasattr(f, "value"): + filter_objects.append(Filter(f.field_name, f.operation, f.value)) + + return canonicalize_filters(filter_objects) diff --git a/sqlspec/core/compiler.py b/sqlspec/core/compiler.py index e784c79e9..c0dcdaae4 100644 --- a/sqlspec/core/compiler.py +++ b/sqlspec/core/compiler.py @@ -171,7 +171,7 @@ def compile(self, sql: str, parameters: Any = None, is_many: bool = False) -> Co if not self._config.enable_caching: return self._compile_uncached(sql, parameters, is_many) - cache_key = self._make_cache_key(sql, parameters) + cache_key = self._make_cache_key(sql, parameters, is_many) if cache_key in self._cache: result = self._cache[cache_key] @@ -216,7 +216,7 @@ def _compile_uncached(self, sql: str, parameters: Any, is_many: bool = False) -> if self._config.parameter_config.needs_static_script_compilation and processed_params is None: sqlglot_sql = processed_sql else: - sqlglot_sql, _ = self._parameter_processor._get_sqlglot_compatible_sql( + sqlglot_sql, _ = self._parameter_processor.get_sqlglot_compatible_sql( sql, parameters, self._config.parameter_config, dialect_str ) @@ -273,12 +273,13 @@ def _compile_uncached(self, sql: str, parameters: Any, is_many: bool = False) -> logger.warning("Compilation failed, using fallback: %s", e) return CompiledSQL(compiled_sql=sql, execution_parameters=parameters, operation_type="UNKNOWN") - def _make_cache_key(self, sql: str, parameters: Any) -> str: + def _make_cache_key(self, sql: str, parameters: Any, is_many: bool = False) -> str: """Generate cache key. Args: sql: SQL string parameters: Parameter values + is_many: Whether this is for execute_many operation Returns: Cache key string @@ -295,6 +296,7 @@ def _make_cache_key(self, sql: str, parameters: Any) -> str: dialect_str, self._config.enable_parsing, self._config.enable_transformations, + is_many, ) hash_str = hashlib.sha256(str(hash_data).encode("utf-8")).hexdigest()[:16] diff --git a/sqlspec/core/filters.py b/sqlspec/core/filters.py index fb64172a3..cff544047 100644 --- a/sqlspec/core/filters.py +++ b/sqlspec/core/filters.py @@ -52,6 +52,8 @@ "SearchFilter", "StatementFilter", "apply_filter", + "canonicalize_filters", + "create_filters", ) T = TypeVar("T") @@ -71,7 +73,7 @@ def append_to_statement(self, statement: "SQL") -> "SQL": Parameters should be provided via extract_parameters(). """ - def extract_parameters(self) -> tuple[list[Any], dict[str, Any]]: + def extract_parameters(self) -> "tuple[list[Any], dict[str, Any]]": """Extract parameters that this filter contributes. Returns: @@ -91,7 +93,7 @@ def _resolve_parameter_conflicts(self, statement: "SQL", proposed_names: list[st Returns: List of resolved parameter names (same length as proposed_names) """ - existing_params = set(statement._named_parameters.keys()) + existing_params = set(statement.named_parameters.keys()) existing_params.update(statement.parameters.keys() if isinstance(statement.parameters, dict) else []) resolved_names = [] @@ -121,39 +123,44 @@ class BeforeAfterFilter(StatementFilter): Applies WHERE clauses for before/after datetime filtering. 
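Stepping back to the compiler hunk above: folding is_many into the cache key means execute and execute_many compilations of the same SQL text can no longer share an entry. A simplified, stand-alone illustration of why the flag must participate in the key; this mirrors _make_cache_key only loosely, since the real key also hashes the dialect and config flags.

import hashlib


def make_key(sql: str, parameters: object, is_many: bool) -> str:
    # Simplified stand-in for SQLProcessor._make_cache_key.
    data = (sql, str(parameters), is_many)
    return hashlib.sha256(str(data).encode("utf-8")).hexdigest()[:16]


sql = "INSERT INTO t (a, b) VALUES (?, ?)"
params = [(1, 2)]
assert make_key(sql, params, is_many=False) != make_key(sql, params, is_many=True)
# Without the flag, a prior single-row compilation could be replayed for execute_many.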
""" - __slots__ = ("_param_name_after", "_param_name_before", "after", "before", "field_name") - - field_name: str - before: Optional[datetime] - after: Optional[datetime] + __slots__ = ("_after", "_before", "_field_name") def __init__(self, field_name: str, before: Optional[datetime] = None, after: Optional[datetime] = None) -> None: - """Initialize the BeforeAfterFilter. + self._field_name = field_name + self._before = before + self._after = after - Args: - field_name: Name of the model attribute to filter on. - before: Filter results where field earlier than this. - after: Filter results where field later than this. - """ - self.field_name = field_name - self.before = before - self.after = after + @property + def field_name(self) -> str: + return self._field_name + + @property + def before(self) -> Optional[datetime]: + return self._before - self._param_name_before: Optional[str] = None - self._param_name_after: Optional[str] = None + @property + def after(self) -> Optional[datetime]: + return self._after + def get_param_names(self) -> list[str]: + """Get parameter names without storing them.""" + names = [] if self.before: - self._param_name_before = f"{self.field_name}_before" + names.append(f"{self.field_name}_before") if self.after: - self._param_name_after = f"{self.field_name}_after" + names.append(f"{self.field_name}_after") + return names def extract_parameters(self) -> tuple[list[Any], dict[str, Any]]: """Extract filter parameters.""" named_parameters = {} - if self.before and self._param_name_before: - named_parameters[self._param_name_before] = self.before - if self.after and self._param_name_after: - named_parameters[self._param_name_after] = self.after + param_names = self.get_param_names() + param_idx = 0 + if self.before: + named_parameters[param_names[param_idx]] = self.before + param_idx += 1 + if self.after: + named_parameters[param_names[param_idx]] = self.after return [], named_parameters def append_to_statement(self, statement: "SQL") -> "SQL": @@ -161,12 +168,7 @@ def append_to_statement(self, statement: "SQL") -> "SQL": conditions: list[Condition] = [] col_expr = exp.column(self.field_name) - proposed_names = [] - if self.before and self._param_name_before: - proposed_names.append(self._param_name_before) - if self.after and self._param_name_after: - proposed_names.append(self._param_name_after) - + proposed_names = self.get_param_names() if not proposed_names: return statement @@ -174,13 +176,13 @@ def append_to_statement(self, statement: "SQL") -> "SQL": param_idx = 0 result = statement - if self.before and self._param_name_before: + if self.before: before_param_name = resolved_names[param_idx] param_idx += 1 conditions.append(exp.LT(this=col_expr, expression=exp.Placeholder(this=before_param_name))) result = result.add_named_parameter(before_param_name, self.before) - if self.after and self._param_name_after: + if self.after: after_param_name = resolved_names[param_idx] conditions.append(exp.GT(this=col_expr, expression=exp.Placeholder(this=after_param_name))) result = result.add_named_parameter(after_param_name, self.after) @@ -201,52 +203,52 @@ class OnBeforeAfterFilter(StatementFilter): Applies WHERE clauses for on-or-before/on-or-after datetime filtering. 
""" - __slots__ = ("_param_name_on_or_after", "_param_name_on_or_before", "field_name", "on_or_after", "on_or_before") - - field_name: str - on_or_before: Optional[datetime] - on_or_after: Optional[datetime] + __slots__ = ("_field_name", "_on_or_after", "_on_or_before") def __init__( self, field_name: str, on_or_before: Optional[datetime] = None, on_or_after: Optional[datetime] = None ) -> None: - """Initialize the OnBeforeAfterFilter. + self._field_name = field_name + self._on_or_before = on_or_before + self._on_or_after = on_or_after - Args: - field_name: Name of the model attribute to filter on. - on_or_before: Filter results where field is on or earlier than this. - on_or_after: Filter results where field on or later than this. - """ - self.field_name = field_name - self.on_or_before = on_or_before - self.on_or_after = on_or_after + @property + def field_name(self) -> str: + return self._field_name + + @property + def on_or_before(self) -> Optional[datetime]: + return self._on_or_before - self._param_name_on_or_before: Optional[str] = None - self._param_name_on_or_after: Optional[str] = None + @property + def on_or_after(self) -> Optional[datetime]: + return self._on_or_after + def get_param_names(self) -> list[str]: + """Get parameter names without storing them.""" + names = [] if self.on_or_before: - self._param_name_on_or_before = f"{self.field_name}_on_or_before" + names.append(f"{self.field_name}_on_or_before") if self.on_or_after: - self._param_name_on_or_after = f"{self.field_name}_on_or_after" + names.append(f"{self.field_name}_on_or_after") + return names def extract_parameters(self) -> tuple[list[Any], dict[str, Any]]: """Extract filter parameters.""" named_parameters = {} - if self.on_or_before and self._param_name_on_or_before: - named_parameters[self._param_name_on_or_before] = self.on_or_before - if self.on_or_after and self._param_name_on_or_after: - named_parameters[self._param_name_on_or_after] = self.on_or_after + param_names = self.get_param_names() + param_idx = 0 + if self.on_or_before: + named_parameters[param_names[param_idx]] = self.on_or_before + param_idx += 1 + if self.on_or_after: + named_parameters[param_names[param_idx]] = self.on_or_after return [], named_parameters def append_to_statement(self, statement: "SQL") -> "SQL": conditions: list[Condition] = [] - proposed_names = [] - if self.on_or_before and self._param_name_on_or_before: - proposed_names.append(self._param_name_on_or_before) - if self.on_or_after and self._param_name_on_or_after: - proposed_names.append(self._param_name_on_or_after) - + proposed_names = self.get_param_names() if not proposed_names: return statement @@ -254,7 +256,7 @@ def append_to_statement(self, statement: "SQL") -> "SQL": param_idx = 0 result = statement - if self.on_or_before and self._param_name_on_or_before: + if self.on_or_before: before_param_name = resolved_names[param_idx] param_idx += 1 conditions.append( @@ -262,7 +264,7 @@ def append_to_statement(self, statement: "SQL") -> "SQL": ) result = result.add_named_parameter(before_param_name, self.on_or_before) - if self.on_or_after and self._param_name_on_or_after: + if self.on_or_after: after_param_name = resolved_names[param_idx] conditions.append( exp.GTE(this=exp.column(self.field_name), expression=exp.Placeholder(this=after_param_name)) @@ -294,33 +296,33 @@ class InCollectionFilter(InAnyFilter[T]): Constructs WHERE ... IN (...) clauses. 
""" - __slots__ = ("_param_names", "field_name", "values") + __slots__ = ("_field_name", "_values") - field_name: str - values: Optional[abc.Collection[T]] + def __init__(self, field_name: str, values: Optional[abc.Collection[T]] = None) -> None: + self._field_name = field_name + self._values = values - def __init__(self, field_name: str, values: Optional[abc.Collection[T]]) -> None: - """Initialize the InCollectionFilter. + @property + def field_name(self) -> str: + return self._field_name - Args: - field_name: Name of the model attribute to filter on. - values: Values for ``IN`` clause. An empty list will return an empty result set, - however, if ``None``, the filter is not applied to the query, and all rows are returned. - """ - self.field_name = field_name - self.values = values + @property + def values(self) -> Optional[abc.Collection[T]]: + return self._values - self._param_names: list[str] = [] - if self.values: - for i, _ in enumerate(self.values): - self._param_names.append(f"{self.field_name}_in_{i}") + def get_param_names(self) -> list[str]: + """Get parameter names without storing them.""" + if not self.values: + return [] + return [f"{self.field_name}_in_{i}" for i, _ in enumerate(self.values)] def extract_parameters(self) -> tuple[list[Any], dict[str, Any]]: """Extract filter parameters.""" named_parameters = {} if self.values: + param_names = self.get_param_names() for i, value in enumerate(self.values): - named_parameters[self._param_names[i]] = value + named_parameters[param_names[i]] = value return [], named_parameters def append_to_statement(self, statement: "SQL") -> "SQL": @@ -330,7 +332,7 @@ def append_to_statement(self, statement: "SQL") -> "SQL": if not self.values: return statement.where(exp.false()) - resolved_names = self._resolve_parameter_conflicts(statement, self._param_names) + resolved_names = self._resolve_parameter_conflicts(statement, self.get_param_names()) placeholder_expressions: list[exp.Placeholder] = [ exp.Placeholder(this=param_name) for param_name in resolved_names @@ -354,39 +356,41 @@ class NotInCollectionFilter(InAnyFilter[T]): Constructs WHERE ... NOT IN (...) clauses. """ - __slots__ = ("_param_names", "field_name", "values") + __slots__ = ("_field_name", "_values") - field_name: str - values: Optional[abc.Collection[T]] + def __init__(self, field_name: str, values: Optional[abc.Collection[T]] = None) -> None: + self._field_name = field_name + self._values = values - def __init__(self, field_name: str, values: Optional[abc.Collection[T]]) -> None: - """Initialize the NotInCollectionFilter. + @property + def field_name(self) -> str: + return self._field_name - Args: - field_name: Name of the model attribute to filter on. - values: Values for ``NOT IN`` clause. An empty list or ``None`` will return all rows. 
- """ - self.field_name = field_name - self.values = values + @property + def values(self) -> Optional[abc.Collection[T]]: + return self._values - self._param_names: list[str] = [] - if self.values: - for i, _ in enumerate(self.values): - self._param_names.append(f"{self.field_name}_notin_{i}_{id(self)}") + def get_param_names(self) -> list[str]: + """Get parameter names without storing them.""" + if not self.values: + return [] + # Use object id to ensure uniqueness between instances + return [f"{self.field_name}_notin_{i}_{id(self)}" for i, _ in enumerate(self.values)] def extract_parameters(self) -> tuple[list[Any], dict[str, Any]]: """Extract filter parameters.""" named_parameters = {} if self.values: + param_names = self.get_param_names() for i, value in enumerate(self.values): - named_parameters[self._param_names[i]] = value + named_parameters[param_names[i]] = value return [], named_parameters def append_to_statement(self, statement: "SQL") -> "SQL": if self.values is None or not self.values: return statement - resolved_names = self._resolve_parameter_conflicts(statement, self._param_names) + resolved_names = self._resolve_parameter_conflicts(statement, self.get_param_names()) placeholder_expressions: list[exp.Placeholder] = [ exp.Placeholder(this=param_name) for param_name in resolved_names @@ -412,34 +416,33 @@ class AnyCollectionFilter(InAnyFilter[T]): Constructs WHERE column_name = ANY (array_expression) clauses. """ - __slots__ = ("_param_names", "field_name", "values") + __slots__ = ("_field_name", "_values") - field_name: str - values: Optional[abc.Collection[T]] + def __init__(self, field_name: str, values: Optional[abc.Collection[T]] = None) -> None: + self._field_name = field_name + self._values = values - def __init__(self, field_name: str, values: Optional[abc.Collection[T]]) -> None: - """Initialize the AnyCollectionFilter. + @property + def field_name(self) -> str: + return self._field_name - Args: - field_name: Name of the model attribute to filter on. - values: Values for ``= ANY (...)`` clause. An empty list will result in a condition - that is always false (no rows returned). If ``None``, the filter is not applied - to the query, and all rows are returned. 
- """ - self.field_name = field_name - self.values = values + @property + def values(self) -> Optional[abc.Collection[T]]: + return self._values - self._param_names: list[str] = [] - if self.values: - for i, _ in enumerate(self.values): - self._param_names.append(f"{self.field_name}_any_{i}") + def get_param_names(self) -> list[str]: + """Get parameter names without storing them.""" + if not self.values: + return [] + return [f"{self.field_name}_any_{i}" for i, _ in enumerate(self.values)] def extract_parameters(self) -> tuple[list[Any], dict[str, Any]]: """Extract filter parameters.""" named_parameters = {} if self.values: + param_names = self.get_param_names() for i, value in enumerate(self.values): - named_parameters[self._param_names[i]] = value + named_parameters[param_names[i]] = value return [], named_parameters def append_to_statement(self, statement: "SQL") -> "SQL": @@ -449,7 +452,7 @@ def append_to_statement(self, statement: "SQL") -> "SQL": if not self.values: return statement.where(exp.false()) - resolved_names = self._resolve_parameter_conflicts(statement, self._param_names) + resolved_names = self._resolve_parameter_conflicts(statement, self.get_param_names()) placeholder_expressions: list[exp.Expression] = [ exp.Placeholder(this=param_name) for param_name in resolved_names @@ -474,38 +477,40 @@ class NotAnyCollectionFilter(InAnyFilter[T]): Constructs WHERE NOT (column_name = ANY (array_expression)) clauses. """ - __slots__ = ("_param_names", "field_name", "values") + __slots__ = ("_field_name", "_values") - def __init__(self, field_name: str, values: Optional[abc.Collection[T]]) -> None: - """Initialize the NotAnyCollectionFilter. + def __init__(self, field_name: str, values: Optional[abc.Collection[T]] = None) -> None: + self._field_name = field_name + self._values = values - Args: - field_name: Name of the model attribute to filter on. - values: Values for ``NOT (... = ANY (...))`` clause. An empty list will result in a - condition that is always true (all rows returned, filter effectively ignored). - If ``None``, the filter is not applied to the query, and all rows are returned. 
- """ - self.field_name = field_name - self.values = values + @property + def field_name(self) -> str: + return self._field_name - self._param_names: list[str] = [] - if self.values: - for i, _ in enumerate(self.values): - self._param_names.append(f"{self.field_name}_not_any_{i}") + @property + def values(self) -> Optional[abc.Collection[T]]: + return self._values + + def get_param_names(self) -> list[str]: + """Get parameter names without storing them.""" + if not self.values: + return [] + return [f"{self.field_name}_not_any_{i}" for i, _ in enumerate(self.values)] def extract_parameters(self) -> tuple[list[Any], dict[str, Any]]: """Extract filter parameters.""" named_parameters = {} if self.values: + param_names = self.get_param_names() for i, value in enumerate(self.values): - named_parameters[self._param_names[i]] = value + named_parameters[param_names[i]] = value return [], named_parameters def append_to_statement(self, statement: "SQL") -> "SQL": if self.values is None or not self.values: return statement - resolved_names = self._resolve_parameter_conflicts(statement, self._param_names) + resolved_names = self._resolve_parameter_conflicts(statement, self.get_param_names()) placeholder_expressions: list[exp.Expression] = [ exp.Placeholder(this=param_name) for param_name in resolved_names @@ -541,39 +546,40 @@ class LimitOffsetFilter(PaginationFilter): Adds pagination support through LIMIT/OFFSET SQL clauses. """ - __slots__ = ("_limit_param_name", "_offset_param_name", "limit", "offset") - - limit: int - offset: int + __slots__ = ("_limit", "_offset") def __init__(self, limit: int, offset: int) -> None: - """Initialize the LimitOffsetFilter. + self._limit = limit + self._offset = offset - Args: - limit: Value for ``LIMIT`` clause of query. - offset: Value for ``OFFSET`` clause of query. - """ - self.limit = limit - self.offset = offset + @property + def limit(self) -> int: + return self._limit + + @property + def offset(self) -> int: + return self._offset - self._limit_param_name = "limit" - self._offset_param_name = "offset" + def get_param_names(self) -> list[str]: + """Get parameter names without storing them.""" + return ["limit", "offset"] def extract_parameters(self) -> tuple[list[Any], dict[str, Any]]: """Extract filter parameters.""" - return [], {self._limit_param_name: self.limit, self._offset_param_name: self.offset} + param_names = self.get_param_names() + return [], {param_names[0]: self.limit, param_names[1]: self.offset} def append_to_statement(self, statement: "SQL") -> "SQL": - resolved_names = self._resolve_parameter_conflicts(statement, [self._limit_param_name, self._offset_param_name]) + resolved_names = self._resolve_parameter_conflicts(statement, self.get_param_names()) limit_param_name, offset_param_name = resolved_names limit_placeholder = exp.Placeholder(this=limit_param_name) offset_placeholder = exp.Placeholder(this=offset_param_name) try: - current_statement = sqlglot.parse_one(statement._raw_sql, dialect=getattr(statement, "_dialect", None)) + current_statement = sqlglot.parse_one(statement.raw_sql, dialect=statement.dialect) except Exception: - current_statement = exp.Select().from_(f"({statement._raw_sql})") + current_statement = exp.Select().from_(f"({statement.raw_sql})") if isinstance(current_statement, exp.Select): new_statement = current_statement.limit(limit_placeholder).offset(offset_placeholder) @@ -596,20 +602,19 @@ class OrderByFilter(StatementFilter): Adds sorting capability to SQL queries. 
""" - __slots__ = ("field_name", "sort_order") - - field_name: str - sort_order: Literal["asc", "desc"] + __slots__ = ("_field_name", "_sort_order") def __init__(self, field_name: str, sort_order: Literal["asc", "desc"] = "asc") -> None: - """Initialize the OrderByFilter. + self._field_name = field_name + self._sort_order = sort_order - Args: - field_name: Name of the model attribute to sort on. - sort_order: Sort ascending or descending. - """ - self.field_name = field_name - self.sort_order = sort_order + @property + def field_name(self) -> str: + return self._field_name + + @property + def sort_order(self) -> Literal["asc", "desc"]: + return self._sort_order # pyright: ignore def extract_parameters(self) -> tuple[list[Any], dict[str, Any]]: """Extract filter parameters.""" @@ -623,12 +628,12 @@ def append_to_statement(self, statement: "SQL") -> "SQL": col_expr = exp.column(self.field_name) order_expr = col_expr.desc() if converted_sort_order == "desc" else col_expr.asc() - if statement._statement is None: + if statement.statement_expression is None: new_statement = exp.Select().order_by(order_expr) - elif isinstance(statement._statement, exp.Select): - new_statement = statement._statement.order_by(order_expr) + elif isinstance(statement.statement_expression, exp.Select): + new_statement = statement.statement_expression.order_by(order_expr) else: - new_statement = exp.Select().from_(statement._statement).order_by(order_expr) + new_statement = exp.Select().from_(statement.statement_expression).order_by(order_expr) return statement.copy(statement=new_statement) @@ -643,44 +648,48 @@ class SearchFilter(StatementFilter): Constructs WHERE field_name LIKE '%value%' clauses. """ - __slots__ = ("_param_name", "field_name", "ignore_case", "value") - - field_name: Union[str, set[str]] - value: str - ignore_case: Optional[bool] + __slots__ = ("_field_name", "_ignore_case", "_value") def __init__(self, field_name: Union[str, set[str]], value: str, ignore_case: Optional[bool] = False) -> None: - """Initialize the SearchFilter. - - Args: - field_name: Name of the model attribute to search on. - value: Search value. - ignore_case: Should the search be case insensitive. 
- """ - self.field_name = field_name - self.value = value - self.ignore_case = ignore_case - - self._param_name: Optional[str] = None - if self.value: - if isinstance(self.field_name, str): - self._param_name = f"{self.field_name}_search" - else: - self._param_name = "search_value" + self._field_name = field_name + self._value = value + self._ignore_case = ignore_case + + @property + def field_name(self) -> Union[str, set[str]]: + return self._field_name + + @property + def value(self) -> str: + return self._value + + @property + def ignore_case(self) -> Optional[bool]: + return self._ignore_case + + def get_param_name(self) -> Optional[str]: + """Get parameter name without storing it.""" + if not self.value: + return None + if isinstance(self.field_name, str): + return f"{self.field_name}_search" + return "search_value" def extract_parameters(self) -> tuple[list[Any], dict[str, Any]]: """Extract filter parameters.""" named_parameters = {} - if self.value and self._param_name: + param_name = self.get_param_name() + if self.value and param_name: search_value_with_wildcards = f"%{self.value}%" - named_parameters[self._param_name] = search_value_with_wildcards + named_parameters[param_name] = search_value_with_wildcards return [], named_parameters def append_to_statement(self, statement: "SQL") -> "SQL": - if not self.value or not self._param_name: + param_name = self.get_param_name() + if not self.value or not param_name: return statement - resolved_names = self._resolve_parameter_conflicts(statement, [self._param_name]) + resolved_names = self._resolve_parameter_conflicts(statement, [param_name]) param_name = resolved_names[0] pattern_expr = exp.Placeholder(this=param_name) @@ -717,38 +726,29 @@ class NotInSearchFilter(SearchFilter): Constructs WHERE field_name NOT LIKE '%value%' clauses. """ - __slots__ = () - - def __init__(self, field_name: Union[str, set[str]], value: str, ignore_case: Optional[bool] = False) -> None: - """Initialize the NotInSearchFilter. - - Args: - field_name: Name of the model attribute to search on. - value: Search value. - ignore_case: Should the search be case insensitive. 
- """ - super().__init__(field_name, value, ignore_case) - - self._param_name: Optional[str] = None - if self.value: - if isinstance(self.field_name, str): - self._param_name = f"{self.field_name}_not_search" - else: - self._param_name = "not_search_value" + def get_param_name(self) -> Optional[str]: + """Get parameter name without storing it.""" + if not self.value: + return None + if isinstance(self.field_name, str): + return f"{self.field_name}_not_search" + return "not_search_value" def extract_parameters(self) -> tuple[list[Any], dict[str, Any]]: """Extract filter parameters.""" named_parameters = {} - if self.value and self._param_name: + param_name = self.get_param_name() + if self.value and param_name: search_value_with_wildcards = f"%{self.value}%" - named_parameters[self._param_name] = search_value_with_wildcards + named_parameters[param_name] = search_value_with_wildcards return [], named_parameters def append_to_statement(self, statement: "SQL") -> "SQL": - if not self.value or not self._param_name: + param_name = self.get_param_name() + if not self.value or not param_name: return statement - resolved_names = self._resolve_parameter_conflicts(statement, [self._param_name]) + resolved_names = self._resolve_parameter_conflicts(statement, [param_name]) param_name = resolved_names[0] pattern_expr = exp.Placeholder(this=param_name) @@ -829,3 +829,36 @@ def apply_filter(statement: "SQL", filter_obj: StatementFilter) -> "SQL": AnyCollectionFilter[Any], NotAnyCollectionFilter[Any], ] + + +def create_filters(filters: "list[StatementFilter]") -> tuple["StatementFilter", ...]: + """Convert mutable filters to immutable tuple. + + Since StatementFilter classes are now immutable (with read-only properties), + we just need to convert to a tuple for consistent sharing. + + Args: + filters: List of StatementFilter objects (already immutable) + + Returns: + Tuple of StatementFilter objects + """ + return tuple(filters) + + +def canonicalize_filters(filters: "list[StatementFilter]") -> tuple["StatementFilter", ...]: + """Sort filters by type and field_name for consistent hashing. 
+ + Args: + filters: List of StatementFilter objects + + Returns: + Canonically sorted tuple of filters + """ + + def sort_key(f: "StatementFilter") -> tuple[str, str]: + class_name = type(f).__name__ + field_name = getattr(f, "field_name", "") + return (class_name, str(field_name)) + + return tuple(sorted(filters, key=sort_key)) diff --git a/sqlspec/core/hashing.py b/sqlspec/core/hashing.py index e396225dd..549752223 100644 --- a/sqlspec/core/hashing.py +++ b/sqlspec/core/hashing.py @@ -185,26 +185,24 @@ def hash_sql_statement(statement: "SQL") -> str: """ from sqlspec.utils.type_guards import is_expression - if is_expression(statement._statement): - expr_hash = hash_expression(statement._statement) - else: - expr_hash = hash(statement._raw_sql) + stmt_expr = statement.statement_expression + expr_hash = hash_expression(stmt_expr) if is_expression(stmt_expr) else hash(statement.raw_sql) param_hash = hash_parameters( - positional_parameters=statement._positional_parameters, - named_parameters=statement._named_parameters, - original_parameters=statement._original_parameters, + positional_parameters=statement.positional_parameters, + named_parameters=statement.named_parameters, + original_parameters=statement.original_parameters, ) - filter_hash = hash_filters(statement._filters) + filter_hash = hash_filters(statement.filters) state_components = [ expr_hash, param_hash, filter_hash, - hash(statement._dialect), - hash(statement._is_many), - hash(statement._is_script), + hash(statement.dialect), + hash(statement.is_many), + hash(statement.is_script), ] return f"sql:{hash(tuple(state_components))}" diff --git a/sqlspec/core/parameters.py b/sqlspec/core/parameters.py index 74301ce29..9c325c9c6 100644 --- a/sqlspec/core/parameters.py +++ b/sqlspec/core/parameters.py @@ -1064,7 +1064,7 @@ def process( return processed_sql, processed_parameters - def _get_sqlglot_compatible_sql( + def get_sqlglot_compatible_sql( self, sql: str, parameters: Any, config: ParameterStyleConfig, dialect: Optional[str] = None ) -> "tuple[str, Any]": """Get SQL normalized for parsing only (Phase 1 only). diff --git a/sqlspec/core/statement.py b/sqlspec/core/statement.py index 39650b508..e039c2b95 100644 --- a/sqlspec/core/statement.py +++ b/sqlspec/core/statement.py @@ -18,6 +18,7 @@ if TYPE_CHECKING: from sqlglot.dialects.dialect import DialectType + from sqlspec.core.cache import FiltersView from sqlspec.core.filters import StatementFilter @@ -161,14 +162,14 @@ def __init__( self._process_parameters(*parameters, **kwargs) def _create_auto_config( - self, statement: "Union[str, exp.Expression, 'SQL']", parameters: tuple, kwargs: dict[str, Any] + self, _statement: "Union[str, exp.Expression, 'SQL']", _parameters: tuple, _kwargs: dict[str, Any] ) -> "StatementConfig": """Create default StatementConfig when none provided. 
Args: - statement: The SQL statement - parameters: Statement parameters - kwargs: Additional keyword arguments + _statement: The SQL statement (unused) + _parameters: Statement parameters (unused) + _kwargs: Additional keyword arguments (unused) Returns: Default StatementConfig instance @@ -196,14 +197,14 @@ def _init_from_sql_object(self, sql_obj: "SQL") -> None: Args: sql_obj: Existing SQL object to copy from """ - self._raw_sql = sql_obj._raw_sql - self._filters = sql_obj._filters.copy() - self._named_parameters = sql_obj._named_parameters.copy() - self._positional_parameters = sql_obj._positional_parameters.copy() - self._is_many = sql_obj._is_many - self._is_script = sql_obj._is_script - if sql_obj._processed_state is not Empty: - self._processed_state = sql_obj._processed_state + self._raw_sql = sql_obj.raw_sql + self._filters = sql_obj.filters.copy() + self._named_parameters = sql_obj.named_parameters.copy() + self._positional_parameters = sql_obj.positional_parameters.copy() + self._is_many = sql_obj.is_many + self._is_script = sql_obj.is_script + if sql_obj.is_processed: + self._processed_state = sql_obj.get_processed_state() def _should_auto_detect_many(self, parameters: tuple) -> bool: """Detect execute_many mode from parameter structure. @@ -270,6 +271,15 @@ def sql(self) -> str: """Get the raw SQL string.""" return self._raw_sql + @property + def raw_sql(self) -> str: + """Get raw SQL string (public API). + + Returns: + The raw SQL string + """ + return self._raw_sql + @property def parameters(self) -> Any: """Get the original parameters.""" @@ -277,6 +287,21 @@ def parameters(self) -> Any: return self._named_parameters return self._positional_parameters or [] + @property + def positional_parameters(self) -> "list[Any]": + """Get positional parameters (public API).""" + return self._positional_parameters or [] + + @property + def named_parameters(self) -> "dict[str, Any]": + """Get named parameters (public API).""" + return self._named_parameters + + @property + def original_parameters(self) -> Any: + """Get original parameters (public API).""" + return self._original_parameters + @property def operation_type(self) -> "OperationType": """SQL operation type.""" @@ -301,6 +326,25 @@ def filters(self) -> "list[StatementFilter]": """Applied filters.""" return self._filters.copy() + def get_filters_view(self) -> "FiltersView": + """Get zero-copy filters view (public API). + + Returns: + Read-only view of filters without copying + """ + from sqlspec.core.cache import FiltersView + + return FiltersView(self._filters) + + @property + def is_processed(self) -> bool: + """Check if SQL has been processed (public API).""" + return self._processed_state is not Empty + + def get_processed_state(self) -> Any: + """Get processed state (public API).""" + return self._processed_state + @property def dialect(self) -> "Optional[str]": """SQL dialect.""" @@ -311,6 +355,17 @@ def _statement(self) -> "Optional[exp.Expression]": """Internal SQLGlot expression.""" return self.expression + @property + def statement_expression(self) -> "Optional[exp.Expression]": + """Get parsed statement expression (public API). 
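A short sketch of the public accessors introduced on the SQL class; the named-parameter binding style shown here is an assumption based on how SQL() is constructed elsewhere in this patch:

from sqlspec.core.statement import SQL

stmt = SQL("SELECT * FROM users WHERE id = :id", id=1)

# Public API instead of reaching into _raw_sql / _named_parameters:
print(stmt.raw_sql)                  # SELECT * FROM users WHERE id = :id
print(stmt.named_parameters)         # expected: {"id": 1}
print(stmt.positional_parameters)    # expected: []
print(stmt.is_many, stmt.is_script)  # expected: False False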
+ + Returns: + Parsed SQLGlot expression or None if not parsed + """ + if self._processed_state is not Empty: + return self._processed_state.parsed_expression + return None + @property def is_many(self) -> bool: """Check if this is execute_many.""" diff --git a/sqlspec/driver/_async.py b/sqlspec/driver/_async.py index 2993c2542..422221bd5 100644 --- a/sqlspec/driver/_async.py +++ b/sqlspec/driver/_async.py @@ -186,10 +186,10 @@ async def execute_many( config = statement_config or self.statement_config if isinstance(statement, SQL): - sql_statement = SQL(statement._raw_sql, parameters, statement_config=config, is_many=True, **kwargs) + sql_statement = SQL(statement.raw_sql, parameters, statement_config=config, is_many=True, **kwargs) else: base_statement = self.prepare_statement(statement, filters, statement_config=config, kwargs=kwargs) - sql_statement = SQL(base_statement._raw_sql, parameters, statement_config=config, is_many=True, **kwargs) + sql_statement = SQL(base_statement.raw_sql, parameters, statement_config=config, is_many=True, **kwargs) return await self.dispatch_statement_execution(statement=sql_statement, connection=self.connection) diff --git a/sqlspec/driver/_common.py b/sqlspec/driver/_common.py index c799d3f40..af41eb793 100644 --- a/sqlspec/driver/_common.py +++ b/sqlspec/driver/_common.py @@ -7,7 +7,7 @@ from sqlspec.builder import QueryBuilder from sqlspec.core import SQL, ParameterStyle, SQLResult, Statement, StatementConfig, TypedParameter -from sqlspec.core.cache import get_cache_config, sql_cache +from sqlspec.core.cache import CachedStatement, get_cache, get_cache_config from sqlspec.core.splitter import split_sql_script from sqlspec.exceptions import ImproperConfigurationError from sqlspec.utils.logging import get_logger @@ -206,16 +206,16 @@ def prepare_statement( sql_statement = statement.to_statement(statement_config) if parameters or kwargs: merged_parameters = ( - (*sql_statement._positional_parameters, *parameters) + (*sql_statement.positional_parameters, *parameters) if parameters - else sql_statement._positional_parameters + else sql_statement.positional_parameters ) return SQL(sql_statement.sql, *merged_parameters, statement_config=statement_config, **kwargs) return sql_statement if isinstance(statement, SQL): if parameters or kwargs: merged_parameters = ( - (*statement._positional_parameters, *parameters) if parameters else statement._positional_parameters + (*statement.positional_parameters, *parameters) if parameters else statement.positional_parameters ) return SQL(statement.sql, *merged_parameters, statement_config=statement_config, **kwargs) needs_rebuild = False @@ -232,14 +232,14 @@ def prepare_statement( needs_rebuild = True if needs_rebuild: - sql_text = statement._raw_sql or statement.sql + sql_text = statement.raw_sql or statement.sql if statement.is_many and statement.parameters: new_sql = SQL(sql_text, statement.parameters, statement_config=statement_config, is_many=True) - elif statement._named_parameters: - new_sql = SQL(sql_text, statement_config=statement_config, **statement._named_parameters) + elif statement.named_parameters: + new_sql = SQL(sql_text, statement_config=statement_config, **statement.named_parameters) else: - new_sql = SQL(sql_text, *statement._positional_parameters, statement_config=statement_config) + new_sql = SQL(sql_text, *statement.positional_parameters, statement_config=statement_config) return new_sql return statement @@ -413,9 +413,10 @@ def _get_compiled_sql( cache_key = None if 
cache_config.compiled_cache_enabled and statement_config.enable_caching: cache_key = self._generate_compilation_cache_key(statement, statement_config, flatten_single_parameters) - cached_result = sql_cache.get(cache_key) - if cached_result is not None: - return cached_result + cache = get_cache() + cached_result = cache.get("statement", cache_key, str(statement.dialect) if statement.dialect else None) + if cached_result is not None and isinstance(cached_result, CachedStatement): + return cached_result.compiled_sql, cached_result.parameters prepared_statement = self.prepare_statement(statement, statement_config=statement_config) compiled_sql, execution_parameters = prepared_statement.compile() @@ -430,7 +431,23 @@ def _get_compiled_sql( ) if cache_key is not None: - sql_cache.set(cache_key, (compiled_sql, prepared_parameters)) + cache = get_cache() + cached_statement = CachedStatement( + compiled_sql=compiled_sql, + parameters=tuple(prepared_parameters) + if isinstance(prepared_parameters, list) + else ( + prepared_parameters + if prepared_parameters is None + else ( + tuple(prepared_parameters) + if not isinstance(prepared_parameters, tuple) + else prepared_parameters + ) + ), + expression=statement.expression, + ) + cache.put("statement", cache_key, cached_statement, str(statement.dialect) if statement.dialect else None) return compiled_sql, prepared_parameters @@ -562,8 +579,8 @@ def _create_count_query(self, original_sql: "SQL") -> "SQL": count_expr.set("limit", None) count_expr.set("offset", None) - return SQL(count_expr, *original_sql._positional_parameters, statement_config=original_sql.statement_config) + return SQL(count_expr, *original_sql.positional_parameters, statement_config=original_sql.statement_config) subquery = cast("exp.Select", expr).subquery(alias="total_query") count_expr = exp.select(exp.Count(this=exp.Star())).from_(subquery) - return SQL(count_expr, *original_sql._positional_parameters, statement_config=original_sql.statement_config) + return SQL(count_expr, *original_sql.positional_parameters, statement_config=original_sql.statement_config) diff --git a/sqlspec/driver/_sync.py b/sqlspec/driver/_sync.py index 40be1aec3..89988711e 100644 --- a/sqlspec/driver/_sync.py +++ b/sqlspec/driver/_sync.py @@ -186,10 +186,10 @@ def execute_many( config = statement_config or self.statement_config if isinstance(statement, SQL): - sql_statement = SQL(statement._raw_sql, parameters, statement_config=config, is_many=True, **kwargs) + sql_statement = SQL(statement.raw_sql, parameters, statement_config=config, is_many=True, **kwargs) else: base_statement = self.prepare_statement(statement, filters, statement_config=config, kwargs=kwargs) - sql_statement = SQL(base_statement._raw_sql, parameters, statement_config=config, is_many=True, **kwargs) + sql_statement = SQL(base_statement.raw_sql, parameters, statement_config=config, is_many=True, **kwargs) return self.dispatch_statement_execution(statement=sql_statement, connection=self.connection) diff --git a/sqlspec/loader.py b/sqlspec/loader.py index 0d837f039..5664a0373 100644 --- a/sqlspec/loader.py +++ b/sqlspec/loader.py @@ -12,7 +12,7 @@ from typing import TYPE_CHECKING, Any, Final, Optional, Union from urllib.parse import unquote, urlparse -from sqlspec.core.cache import CacheKey, get_cache_config, get_default_cache +from sqlspec.core.cache import get_cache, get_cache_config from sqlspec.core.statement import SQL from sqlspec.exceptions import SQLFileNotFoundError, SQLFileParseError, StorageOperationFailedError from 
sqlspec.storage.registry import storage_registry as default_storage_registry @@ -438,9 +438,8 @@ def _load_single_file(self, file_path: Union[str, Path], namespace: Optional[str return cache_key_str = self._generate_file_cache_key(file_path) - cache_key = CacheKey((cache_key_str,)) - unified_cache = get_default_cache() - cached_file = unified_cache.get(cache_key) + cache = get_cache() + cached_file = cache.get("file", cache_key_str) if ( cached_file is not None @@ -475,7 +474,7 @@ def _load_single_file(self, file_path: Union[str, Path], namespace: Optional[str file_statements[stored_name] = self._queries[query_name] cached_file_data = CachedSQLFile(sql_file=sql_file, parsed_statements=file_statements) - unified_cache.put(cache_key, cached_file_data) + cache.put("file", cache_key_str, cached_file_data) def _load_file_without_cache(self, file_path: Union[str, Path], namespace: Optional[str]) -> None: """Load a single SQL file without using cache. @@ -592,15 +591,15 @@ def clear_cache(self) -> None: cache_config = get_cache_config() if cache_config.compiled_cache_enabled: - unified_cache = get_default_cache() - unified_cache.clear() + cache = get_cache() + cache.clear() def clear_file_cache(self) -> None: """Clear the file cache only, keeping loaded queries.""" cache_config = get_cache_config() if cache_config.compiled_cache_enabled: - unified_cache = get_default_cache() - unified_cache.clear() + cache = get_cache() + cache.clear() def get_query_text(self, name: str) -> str: """Get raw SQL text for a query. diff --git a/sqlspec/storage/backends/fsspec.py b/sqlspec/storage/backends/fsspec.py index 949981786..1f6877bdb 100644 --- a/sqlspec/storage/backends/fsspec.py +++ b/sqlspec/storage/backends/fsspec.py @@ -1,3 +1,4 @@ +# pyright: reportPrivateUsage=false import logging from pathlib import Path from typing import TYPE_CHECKING, Any, Optional, Union diff --git a/tests/integration/test_adapters/test_adbc/test_parameter_styles.py b/tests/integration/test_adapters/test_adbc/test_parameter_styles.py index 1a4289edd..494a9d615 100644 --- a/tests/integration/test_adapters/test_adbc/test_parameter_styles.py +++ b/tests/integration/test_adapters/test_adbc/test_parameter_styles.py @@ -1,3 +1,4 @@ +# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false """Test different parameter styles and None handling for ADBC drivers. 
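A brief sketch of the namespaced cache access the loader now relies on; the key and payload values are illustrative only:

from sqlspec.core.cache import get_cache

cache = get_cache()

# The loader stores parsed SQL files under the "file" namespace.
cache.put("file", "queries/users.sql", {"users.get_by_id": "SELECT 1"})

assert cache.get("file", "queries/users.sql") is not None
assert cache.get("file", "queries/missing.sql") is None

cache.delete("file", "queries/users.sql")
assert cache.get("file", "queries/users.sql") is None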
This file tests comprehensive None parameter handling for ADBC, diff --git a/tests/integration/test_adapters/test_aiosqlite/test_connection.py b/tests/integration/test_adapters/test_aiosqlite/test_connection.py index 9f229dcfd..1a21644c1 100644 --- a/tests/integration/test_adapters/test_aiosqlite/test_connection.py +++ b/tests/integration/test_adapters/test_aiosqlite/test_connection.py @@ -1,3 +1,4 @@ +# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false """Test AIOSQLite connection functionality.""" from __future__ import annotations diff --git a/tests/integration/test_adapters/test_aiosqlite/test_pooling.py b/tests/integration/test_adapters/test_aiosqlite/test_pooling.py index 59ddb01f7..19ad26ab2 100644 --- a/tests/integration/test_adapters/test_aiosqlite/test_pooling.py +++ b/tests/integration/test_adapters/test_aiosqlite/test_pooling.py @@ -1,3 +1,4 @@ +# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false """Integration tests for aiosqlite connection pooling.""" from __future__ import annotations diff --git a/tests/integration/test_adapters/test_duckdb/test_connection.py b/tests/integration/test_adapters/test_duckdb/test_connection.py index 20cbe8985..906e968e0 100644 --- a/tests/integration/test_adapters/test_duckdb/test_connection.py +++ b/tests/integration/test_adapters/test_duckdb/test_connection.py @@ -1,3 +1,4 @@ +# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false """Test DuckDB connection configuration.""" from typing import Any diff --git a/tests/integration/test_adapters/test_sqlite/test_pooling.py b/tests/integration/test_adapters/test_sqlite/test_pooling.py index d9bb5826f..1cad61edf 100644 --- a/tests/integration/test_adapters/test_sqlite/test_pooling.py +++ b/tests/integration/test_adapters/test_sqlite/test_pooling.py @@ -1,3 +1,4 @@ +# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false """Integration tests for SQLite connection pooling.""" import pytest diff --git a/tests/unit/test_adapters/test_async_adapters.py b/tests/unit/test_adapters/test_async_adapters.py index 2837181bf..ed5543e43 100644 --- a/tests/unit/test_adapters/test_async_adapters.py +++ b/tests/unit/test_adapters/test_async_adapters.py @@ -1,3 +1,4 @@ +# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false """Tests for asynchronous database adapters.""" from typing import Any diff --git a/tests/unit/test_adapters/test_sync_adapters.py b/tests/unit/test_adapters/test_sync_adapters.py index 16d03a0bf..dfe8e1be4 100644 --- a/tests/unit/test_adapters/test_sync_adapters.py +++ b/tests/unit/test_adapters/test_sync_adapters.py @@ -1,3 +1,4 @@ +# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false """Tests for synchronous database adapters.""" from typing import Any diff --git a/tests/unit/test_base/test_sql_integration.py b/tests/unit/test_base/test_sql_integration.py index e153fd7a3..e9fc0cd97 100644 --- a/tests/unit/test_base/test_sql_integration.py +++ b/tests/unit/test_base/test_sql_integration.py @@ -1,3 +1,4 @@ +# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false """Unit tests for SQLSpec SQL loading integration. 
Tests the integration of SQLFileLoader functionality into the SQLSpec base class, diff --git a/tests/unit/test_base/test_sqlspec_class.py b/tests/unit/test_base/test_sqlspec_class.py index d5b893d6c..1d28893c8 100644 --- a/tests/unit/test_base/test_sqlspec_class.py +++ b/tests/unit/test_base/test_sqlspec_class.py @@ -140,18 +140,16 @@ def test_get_cache_stats_returns_statistics() -> None: """Test that get_cache_stats returns cache statistics.""" stats = SQLSpec.get_cache_stats() - assert hasattr(stats, "sql_hit_rate") - assert hasattr(stats, "fragment_hit_rate") - assert hasattr(stats, "optimized_hit_rate") - assert hasattr(stats, "sql_size") - assert hasattr(stats, "fragment_size") - assert hasattr(stats, "optimized_size") - assert hasattr(stats, "sql_hits") - assert hasattr(stats, "sql_misses") - assert hasattr(stats, "fragment_hits") - assert hasattr(stats, "fragment_misses") - assert hasattr(stats, "optimized_hits") - assert hasattr(stats, "optimized_misses") + assert isinstance(stats, dict) + assert "multi_level" in stats + + multi_stats = stats["multi_level"] + + assert hasattr(multi_stats, "hit_rate") + assert hasattr(multi_stats, "hits") + assert hasattr(multi_stats, "misses") + assert hasattr(multi_stats, "evictions") + assert hasattr(multi_stats, "total_operations") def test_reset_cache_stats_clears_statistics() -> None: @@ -159,12 +157,12 @@ def test_reset_cache_stats_clears_statistics() -> None: SQLSpec.reset_cache_stats() stats = SQLSpec.get_cache_stats() - assert stats.sql_hits == 0 - assert stats.sql_misses == 0 - assert stats.fragment_hits == 0 - assert stats.fragment_misses == 0 - assert stats.optimized_hits == 0 - assert stats.optimized_misses == 0 + multi_stats = stats["multi_level"] + + assert multi_stats.hits == 0 + assert multi_stats.misses == 0 + assert multi_stats.evictions == 0 + assert multi_stats.total_operations == 0 def test_log_cache_stats_logs_to_configured_logger() -> None: @@ -183,23 +181,21 @@ def test_log_cache_stats_logs_to_configured_logger() -> None: assert "Cache Statistics" in call_args[0][0] +@patch("sqlspec.core.cache.get_cache") @patch("sqlspec.core.cache.get_default_cache") -@patch("sqlspec.core.cache.get_statement_cache") -def test_update_cache_config_clears_all_caches( - mock_get_statement_cache: MagicMock, mock_get_default_cache: MagicMock -) -> None: +def test_update_cache_config_clears_all_caches(mock_get_default_cache: MagicMock, mock_get_cache: MagicMock) -> None: """Test that updating cache configuration clears all existing caches.""" mock_default_cache = MagicMock() - mock_statement_cache = MagicMock() + mock_multi_cache = MagicMock() mock_get_default_cache.return_value = mock_default_cache - mock_get_statement_cache.return_value = mock_statement_cache + mock_get_cache.return_value = mock_multi_cache new_config = CacheConfig(sql_cache_size=1000) SQLSpec.update_cache_config(new_config) mock_default_cache.clear.assert_called_once() - mock_statement_cache.clear.assert_called_once() + mock_multi_cache.clear.assert_called_once() def test_multiple_sqlspec_instances_share_cache_configuration() -> None: @@ -291,7 +287,9 @@ def stats_worker() -> None: for _ in range(50): stats = SQLSpec.get_cache_stats() SQLSpec.reset_cache_stats() - results.append(stats.sql_hits + stats.sql_misses) + multi_stats = stats["multi_level"] + total_ops = multi_stats.hits + multi_stats.misses + results.append(total_ops) time.sleep(0.001) except Exception as e: errors.append(e) @@ -555,7 +553,10 @@ def test_statistics_collection_during_configuration_changes() -> None: 
SQLSpec.update_cache_config(config) stats = SQLSpec.get_cache_stats() - assert hasattr(stats, "sql_hit_rate") + assert isinstance(stats, dict) + assert "multi_level" in stats + multi_stats = stats["multi_level"] + assert hasattr(multi_stats, "hit_rate") SQLSpec.reset_cache_stats() diff --git a/tests/unit/test_core/test_cache.py b/tests/unit/test_core/test_cache.py index 6840adc35..ef071c277 100644 --- a/tests/unit/test_core/test_cache.py +++ b/tests/unit/test_core/test_cache.py @@ -1,3 +1,4 @@ +# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false """Unit tests for the SQLSpec cache system. This module tests the unified caching system. Tests cover: @@ -5,12 +6,10 @@ 1. CacheKey - Immutable cache keys 2. CacheStats - Cache statistics tracking and monitoring 3. UnifiedCache - Main LRU cache implementation with TTL support -4. StatementCache - Specialized caching for compiled SQL statements -5. ExpressionCache - Specialized caching for parsed SQLGlot expressions -6. ParameterCache - Specialized caching for processed parameters -7. Cache management functions - Global cache management and configuration -8. Thread safety - Concurrent access and operations -9. Performance characteristics - O(1) operations and memory efficiency +4. MultiLevelCache - Namespace-based cache with zero-copy views +5. Cache management functions - Global cache management and configuration +6. Thread safety - Concurrent access and operations +7. Performance characteristics - O(1) operations and memory efficiency The cache system provides thread-safe caching with LRU eviction, TTL-based expiration, and statistics tracking for monitoring @@ -19,7 +18,6 @@ import threading import time -from typing import Any from unittest.mock import MagicMock, patch import pytest @@ -28,22 +26,16 @@ CacheConfig, CacheKey, CacheStats, - CacheStatsAggregate, - ExpressionCache, - ParameterCache, - StatementCache, + MultiLevelCache, UnifiedCache, clear_all_caches, + get_cache, get_cache_config, get_cache_statistics, get_cache_stats, get_default_cache, - get_expression_cache, - get_parameter_cache, - get_statement_cache, log_cache_stats, reset_cache_stats, - sql_cache, update_cache_config, ) @@ -335,227 +327,61 @@ def test_unified_cache_statistics_tracking() -> None: assert stats.evictions == 1 -def test_statement_cache_initialization() -> None: - """Test StatementCache initialization.""" - stmt_cache = StatementCache(max_size=100) - - assert isinstance(stmt_cache._cache, UnifiedCache) - stats = stmt_cache.get_stats() - assert stats.hits == 0 - assert stats.misses == 0 - - -@patch("sqlspec.core.statement.SQL") -def test_statement_cache_compiled_storage_and_retrieval(mock_sql: MagicMock) -> None: - """Test storing and retrieving compiled SQL statements.""" - stmt_cache = StatementCache() - - mock_statement = MagicMock() - mock_statement._raw_sql = "SELECT * FROM users WHERE id = ?" - mock_statement.dialect = None - mock_statement.is_many = False - mock_statement.is_script = False - mock_sql.return_value = mock_statement +def test_multi_level_cache_statement_operations() -> None: + """Test MultiLevelCache statement namespace operations.""" + cache = get_cache() + cache_key = "SELECT * FROM users WHERE id = ?" 
compiled_sql = "SELECT * FROM users WHERE id = $1" parameters = ["param1"] - stmt_cache.put_compiled(mock_statement, compiled_sql, parameters) + cache_value = (compiled_sql, parameters) + + cache.put("statement", cache_key, cache_value) - result = stmt_cache.get_compiled(mock_statement) + result = cache.get("statement", cache_key) assert result is not None assert result[0] == compiled_sql assert result[1] == parameters + cache.delete("statement", cache_key) + assert cache.get("statement", cache_key) is None -@patch("sqlspec.core.statement.SQL") -def test_statement_cache_key_generation(mock_sql: MagicMock) -> None: - """Test cache key generation for SQL statements.""" - stmt_cache = StatementCache() - - mock_statement1 = MagicMock() - mock_statement1._raw_sql = "SELECT * FROM users" - mock_statement1.dialect = "postgresql" - mock_statement1.is_many = False - mock_statement1.is_script = False - - def _hash1(self: Any) -> int: - return hash("statement1") - - mock_statement1.__hash__ = _hash1 # pyright: ignore[reportAttributeAccessIssue] - - mock_statement2 = MagicMock() - mock_statement2._raw_sql = "SELECT * FROM orders" - mock_statement2.dialect = "postgresql" - mock_statement2.is_many = False - mock_statement2.is_script = False - - def _hash2(self: Any) -> int: - return hash("statement2") - - mock_statement2.__hash__ = _hash2 # pyright: ignore[reportAttributeAccessIssue] - - key1 = stmt_cache._create_statement_key(mock_statement1) - key2 = stmt_cache._create_statement_key(mock_statement2) - - assert key1 != key2 - assert isinstance(key1, CacheKey) - assert isinstance(key2, CacheKey) - - -def test_statement_cache_clear_operation() -> None: - """Test clearing statement cache.""" - stmt_cache = StatementCache() - - test_key = CacheKey(("test", "data")) - stmt_cache._cache.put(test_key, ("SELECT 1", [])) - - assert stmt_cache._cache.size() == 1 - stmt_cache.clear() - assert stmt_cache._cache.size() == 0 - - -def test_expression_cache_initialization() -> None: - """Test ExpressionCache initialization.""" - expr_cache = ExpressionCache(max_size=50) - - assert isinstance(expr_cache._cache, UnifiedCache) - stats = expr_cache.get_stats() - assert stats.hits == 0 - - -def test_expression_cache_key_generation() -> None: - """Test cache key generation for expressions.""" - expr_cache = ExpressionCache() - - sql1 = "SELECT * FROM users" - dialect1 = "postgresql" - key1 = expr_cache._create_expression_key(sql1, dialect1) - - sql2 = "SELECT * FROM orders" - dialect2 = "postgresql" - key2 = expr_cache._create_expression_key(sql2, dialect2) - - sql3 = sql1 - dialect3 = "mysql" - key3 = expr_cache._create_expression_key(sql3, dialect3) - - assert key1 != key2 - - assert key1 != key3 - - -def test_expression_cache_storage_and_retrieval() -> None: - """Test storing and retrieving parsed expressions.""" - expr_cache = ExpressionCache() +def test_multi_level_cache_expression_operations() -> None: + """Test MultiLevelCache expression namespace operations.""" + cache = get_cache() sql = "SELECT * FROM users WHERE id = 1" dialect = "postgresql" + cache_key = f"{sql}::{dialect}" mock_expression = MagicMock() mock_expression.sql.return_value = sql - expr_cache.put_expression(sql, mock_expression, dialect) + cache.put("expression", cache_key, mock_expression) - result = expr_cache.get_expression(sql, dialect) + result = cache.get("expression", cache_key) assert result is mock_expression - result_different = expr_cache.get_expression(sql, "mysql") - assert result_different is None - - -def 
test_expression_cache_clear_operation() -> None: - """Test clearing expression cache.""" - expr_cache = ExpressionCache() + result_missing = cache.get("expression", "missing_key") + assert result_missing is None - sql = "SELECT 1" - expr_cache.put_expression(sql, MagicMock()) - assert expr_cache._cache.size() == 1 - expr_cache.clear() - assert expr_cache._cache.size() == 0 - - -def test_parameter_cache_initialization() -> None: - """Test ParameterCache initialization.""" - param_cache = ParameterCache(max_size=200) - - assert isinstance(param_cache._cache, UnifiedCache) - stats = param_cache.get_stats() - assert stats.hits == 0 - - -def test_parameter_cache_key_generation_dict_params() -> None: - """Test cache key generation for dictionary parameters.""" - param_cache = ParameterCache() - - params1 = {"user_id": 1, "name": "John"} - config_hash1 = hash("config1") - key1 = param_cache._create_parameter_key(params1, config_hash1) - - params2 = {"user_id": 2, "name": "Jane"} - config_hash2 = hash("config1") - key2 = param_cache._create_parameter_key(params2, config_hash2) - - params3 = params1 - config_hash3 = hash("config2") - key3 = param_cache._create_parameter_key(params3, config_hash3) - - assert key1 != key2 - - assert key1 != key3 - - -def test_parameter_cache_key_generation_list_params() -> None: - """Test cache key generation for list/tuple parameters.""" - param_cache = ParameterCache() - - params1 = [1, 2, 3] - params2 = (1, 2, 3) - config_hash = hash("config") - - key1 = param_cache._create_parameter_key(params1, config_hash) - key2 = param_cache._create_parameter_key(params2, config_hash) - - assert key1 == key2 - - -def test_parameter_cache_key_generation_unhashable_params() -> None: - """Test cache key generation for unhashable parameters.""" - param_cache = ParameterCache() - - params = [[1, 2], [3, 4]] - config_hash = hash("config") - - key = param_cache._create_parameter_key(params, config_hash) - assert isinstance(key, CacheKey) - - -def test_parameter_cache_storage_and_retrieval() -> None: - """Test storing and retrieving processed parameters.""" - param_cache = ParameterCache() +def test_multi_level_cache_parameter_operations() -> None: + """Test MultiLevelCache parameter namespace operations.""" + cache = get_cache() original_params = {"user_id": 1, "name": "John"} processed_params = [1, "John"] config_hash = hash("config") + cache_key = f"{hash(str(original_params))}::{config_hash}" - param_cache.put_parameters(original_params, processed_params, config_hash) + cache.put("parameter", cache_key, processed_params) - result = param_cache.get_parameters(original_params, config_hash) + result = cache.get("parameter", cache_key) assert result == processed_params - result_different = param_cache.get_parameters(original_params, hash("different_config")) - assert result_different is None - - -def test_parameter_cache_clear_operation() -> None: - """Test clearing parameter cache.""" - param_cache = ParameterCache() - - param_cache.put_parameters({"test": 1}, [1], hash("config")) - assert param_cache._cache.size() == 1 - - param_cache.clear() - assert param_cache._cache.size() == 0 + cache.delete("parameter", cache_key) + assert cache.get("parameter", cache_key) is None def test_get_default_cache_singleton() -> None: @@ -567,75 +393,45 @@ def test_get_default_cache_singleton() -> None: assert isinstance(cache1, UnifiedCache) -def test_get_statement_cache_singleton() -> None: - """Test that get_statement_cache returns the same instance.""" - cache1 = get_statement_cache() - cache2 = 
get_statement_cache() - - assert cache1 is cache2 - assert isinstance(cache1, StatementCache) - - -def test_get_expression_cache_singleton() -> None: - """Test that get_expression_cache returns the same instance.""" - cache1 = get_expression_cache() - cache2 = get_expression_cache() +def test_get_cache_singleton() -> None: + """Test that get_cache returns the same instance.""" + cache1 = get_cache() + cache2 = get_cache() assert cache1 is cache2 - assert isinstance(cache1, ExpressionCache) - - -def test_get_parameter_cache_singleton() -> None: - """Test that get_parameter_cache returns the same instance.""" - cache1 = get_parameter_cache() - cache2 = get_parameter_cache() - - assert cache1 is cache2 - assert isinstance(cache1, ParameterCache) + assert isinstance(cache1, MultiLevelCache) def test_clear_all_caches_function() -> None: """Test clearing all global cache instances.""" default_cache = get_default_cache() - stmt_cache = get_statement_cache() - expr_cache = get_expression_cache() - param_cache = get_parameter_cache() + multi_cache = get_cache() test_key = CacheKey(("test",)) default_cache.put(test_key, "test_value") - stmt_cache._cache.put(test_key, ("SELECT 1", [])) - expr_cache._cache.put(test_key, MagicMock()) - param_cache._cache.put(test_key, [1, 2, 3]) + multi_cache.put("test", "key1", "value1") assert default_cache.size() > 0 - assert stmt_cache._cache.size() > 0 - assert expr_cache._cache.size() > 0 - assert param_cache._cache.size() > 0 + assert multi_cache.get("test", "key1") == "value1" clear_all_caches() assert default_cache.size() == 0 - assert stmt_cache._cache.size() == 0 - assert expr_cache._cache.size() == 0 - assert param_cache._cache.size() == 0 + assert multi_cache.get("test", "key1") is None def test_get_cache_statistics_function() -> None: """Test getting statistics from all cache instances.""" get_default_cache() - get_statement_cache() - get_expression_cache() - get_parameter_cache() + get_cache() stats_dict = get_cache_statistics() assert isinstance(stats_dict, dict) assert "default" in stats_dict - assert "statement" in stats_dict - assert "expression" in stats_dict - assert "parameter" in stats_dict + assert "multi_level" in stats_dict for stats in stats_dict.values(): assert isinstance(stats, CacheStats) @@ -693,55 +489,52 @@ def test_update_cache_config_function() -> None: update_cache_config(original_config) -def test_cache_stats_aggregate_initialization() -> None: - """Test CacheStatsAggregate initialization.""" - stats = CacheStatsAggregate() +def test_multi_level_cache_namespace_isolation() -> None: + """Test that different namespaces in MultiLevelCache are isolated.""" + cache = get_cache() + + cache.put("statement", "key1", "value1") + cache.put("expression", "key1", "value2") + cache.put("parameter", "key1", "value3") - assert stats.sql_hit_rate == 0.0 - assert stats.fragment_hit_rate == 0.0 - assert stats.optimized_hit_rate == 0.0 - assert stats.sql_size == 0 - assert stats.fragment_size == 0 - assert stats.optimized_size == 0 - assert stats.sql_hits == 0 - assert stats.sql_misses == 0 - assert stats.fragment_hits == 0 - assert stats.fragment_misses == 0 - assert stats.optimized_hits == 0 - assert stats.optimized_misses == 0 + assert cache.get("statement", "key1") == "value1" + assert cache.get("expression", "key1") == "value2" + assert cache.get("parameter", "key1") == "value3" + + cache.delete("statement", "key1") + assert cache.get("statement", "key1") is None + assert cache.get("expression", "key1") == "value2" + assert 
cache.get("parameter", "key1") == "value3" def test_get_cache_stats_aggregation() -> None: """Test cache statistics aggregation.""" - reset_cache_stats() stats = get_cache_stats() - assert isinstance(stats, CacheStatsAggregate) - - assert stats.sql_hits == 0 - assert stats.sql_misses == 0 + assert isinstance(stats, dict) + assert "default" in stats + assert "multi_level" in stats def test_reset_cache_stats_function() -> None: """Test resetting all cache statistics.""" - default_cache = get_default_cache() - stmt_cache = get_statement_cache() + multi_cache = get_cache() test_key = CacheKey(("test",)) default_cache.get(test_key) - stmt_cache._cache.get(test_key) + multi_cache.get("test", "key") reset_cache_stats() default_stats = default_cache.get_stats() - stmt_stats = stmt_cache.get_stats() + multi_stats = multi_cache.get_stats() assert default_stats.hits == 0 assert default_stats.misses == 0 - assert stmt_stats.hits == 0 - assert stmt_stats.misses == 0 + assert multi_stats.hits == 0 + assert multi_stats.misses == 0 def test_log_cache_stats_function() -> None: @@ -756,17 +549,18 @@ def test_log_cache_stats_function() -> None: mock_logger.info.assert_called_once() -def test_sql_cache_interface() -> None: - """Test SQL compilation cache interface for compatibility.""" - cache_key = "test_sql_cache_key" +def test_multi_level_cache_interface() -> None: + """Test multi-level cache interface.""" + cache = get_cache() + cache_key = "test_cache_key" cache_value = ("SELECT * FROM users WHERE id = $1", [1]) - sql_cache.set(cache_key, cache_value) + cache.put("statement", cache_key, cache_value, "postgres") - result = sql_cache.get(cache_key) + result = cache.get("statement", cache_key, "postgres") assert result == cache_value - result_none = sql_cache.get("non_existent_key") + result_none = cache.get("statement", "non_existent_key", "postgres") assert result_none is None diff --git a/tests/unit/test_core/test_compiler.py b/tests/unit/test_core/test_compiler.py index 3e520322e..b8ae21c95 100644 --- a/tests/unit/test_core/test_compiler.py +++ b/tests/unit/test_core/test_compiler.py @@ -1,3 +1,4 @@ +# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false """Unit tests for the core.compiler module. This module tests the SQLProcessor and CompiledSQL classes. diff --git a/tests/unit/test_core/test_hashing.py b/tests/unit/test_core/test_hashing.py index 1d037610b..8264e95f0 100644 --- a/tests/unit/test_core/test_hashing.py +++ b/tests/unit/test_core/test_hashing.py @@ -1,3 +1,4 @@ +# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false """Tests for sqlspec.utils.statement_hashing module. Tests for SQL statement and expression hashing utilities used for cache key generation. 
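A small sketch of consuming the dict-shaped cache statistics, following the keys and attribute names asserted in the tests above:

from sqlspec.core.cache import get_cache_stats, reset_cache_stats

reset_cache_stats()
stats = get_cache_stats()

# Two entries: the default UnifiedCache and the namespaced MultiLevelCache.
default_stats = stats["default"]
multi_stats = stats["multi_level"]

print(default_stats.hits, default_stats.misses)
print(multi_stats.hits, multi_stats.misses, multi_stats.hit_rate)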
@@ -282,15 +283,15 @@ def test_hash_sql_statement_basic() -> None: """Test hash_sql_statement with basic SQL statement.""" statement = Mock() - statement._statement = parse_one("SELECT 1") - statement._raw_sql = "SELECT 1" - statement._positional_parameters = None - statement._named_parameters = None - statement._original_parameters = None - statement._filters = None - statement._dialect = "sqlite" - statement._is_many = False - statement._is_script = False + statement.statement_expression = parse_one("SELECT 1") + statement.raw_sql = "SELECT 1" + statement.positional_parameters = [] + statement.named_parameters = {} + statement.original_parameters = None + statement.filters = [] + statement.dialect = "sqlite" + statement.is_many = False + statement.is_script = False result = hash_sql_statement(statement) assert isinstance(result, str) @@ -300,15 +301,15 @@ def test_hash_sql_statement_basic() -> None: def test_hash_sql_statement_with_parameters() -> None: """Test hash_sql_statement with parameters.""" statement = Mock() - statement._statement = parse_one("SELECT * FROM users WHERE id = ?") - statement._raw_sql = "SELECT * FROM users WHERE id = ?" - statement._positional_parameters = [123] - statement._named_parameters = {"user_id": 123} - statement._original_parameters = [123] - statement._filters = None - statement._dialect = "sqlite" - statement._is_many = False - statement._is_script = False + statement.statement_expression = parse_one("SELECT * FROM users WHERE id = ?") + statement.raw_sql = "SELECT * FROM users WHERE id = ?" + statement.positional_parameters = [123] + statement.named_parameters = {"user_id": 123} + statement.original_parameters = [123] + statement.filters = [] + statement.dialect = "sqlite" + statement.is_many = False + statement.is_script = False result = hash_sql_statement(statement) assert isinstance(result, str) @@ -318,15 +319,15 @@ def test_hash_sql_statement_with_parameters() -> None: def test_hash_sql_statement_raw_sql_fallback() -> None: """Test hash_sql_statement falls back to raw SQL when expression not available.""" statement = Mock() - statement._statement = "SELECT 1" - statement._raw_sql = "SELECT 1" - statement._positional_parameters = None - statement._named_parameters = None - statement._original_parameters = None - statement._filters = None - statement._dialect = "sqlite" - statement._is_many = False - statement._is_script = False + statement.statement_expression = "SELECT 1" + statement.raw_sql = "SELECT 1" + statement.positional_parameters = [] + statement.named_parameters = {} + statement.original_parameters = None + statement.filters = [] + statement.dialect = "sqlite" + statement.is_many = False + statement.is_script = False with pytest.MonkeyPatch().context() as m: m.setattr("sqlspec.utils.type_guards.is_expression", lambda x: False) @@ -521,13 +522,13 @@ def test_error_handling() -> None: """Test error handling in hash functions.""" malformed_statement = Mock() - malformed_statement._positional_parameters = None - malformed_statement._named_parameters = None - malformed_statement._original_parameters = None - malformed_statement._filters = None - malformed_statement._dialect = "sqlite" - malformed_statement._is_many = False - malformed_statement._is_script = False + malformed_statement.positional_parameters = [] + malformed_statement.named_parameters = {} + malformed_statement.original_parameters = None + malformed_statement.filters = [] + malformed_statement.dialect = "sqlite" + malformed_statement.is_many = False + 
malformed_statement.is_script = False try: hash_sql_statement(malformed_statement) diff --git a/tests/unit/test_core/test_statement.py b/tests/unit/test_core/test_statement.py index e82e6b70d..dc2bb600c 100644 --- a/tests/unit/test_core/test_statement.py +++ b/tests/unit/test_core/test_statement.py @@ -1,3 +1,5 @@ +# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false +# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false """Unit tests for sqlspec.core.statement module. This test module validates the SQL class and StatementConfig implementations. diff --git a/tests/unit/test_loader/test_cache_integration.py b/tests/unit/test_loader/test_cache_integration.py index 6332f75af..7ff61b766 100644 --- a/tests/unit/test_loader/test_cache_integration.py +++ b/tests/unit/test_loader/test_cache_integration.py @@ -1,3 +1,5 @@ +# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false +# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false """Unit tests for cache integration in SQL loader. Tests cache integration with architecture including: @@ -48,7 +50,7 @@ def test_cache_disabled_loading(mock_load_without_cache: Mock, mock_get_cache_co @patch("sqlspec.loader.get_cache_config") -@patch("sqlspec.loader.get_default_cache") +@patch("sqlspec.loader.get_cache") def test_cache_enabled_loading(mock_get_cache: Mock, mock_get_cache_config: Mock) -> None: """Test loading when cache is enabled.""" @@ -133,13 +135,16 @@ def mock_cache_setup() -> Generator[tuple[Mock, Mock, SQLFileLoader], None, None """Set up mock cache infrastructure for testing.""" with ( patch("sqlspec.loader.get_cache_config") as mock_config, - patch("sqlspec.loader.get_default_cache") as mock_cache_factory, + patch("sqlspec.loader.get_cache") as mock_cache_factory, ): mock_cache_config = Mock() mock_cache_config.compiled_cache_enabled = True mock_config.return_value = mock_cache_config mock_cache = Mock() + mock_cache.get = Mock() + mock_cache.put = Mock() + mock_cache.clear = Mock() mock_cache_factory.return_value = mock_cache loader = SQLFileLoader() @@ -169,6 +174,8 @@ def test_cache_hit_scenario(mock_cache_setup: tuple[Mock, Mock, SQLFileLoader]) loader._load_single_file(tf.name, None) mock_cache.get.assert_called_once() + call_args = mock_cache.get.call_args + assert call_args[0][0] == "file" # First arg should be "file" namespace mock_cache.put.assert_not_called() @@ -195,8 +202,12 @@ def test_cache_miss_scenario(mock_cache_setup: tuple[Mock, Mock, SQLFileLoader]) loader._load_single_file(tf.name, None) mock_cache.get.assert_called_once() + get_call_args = mock_cache.get.call_args + assert get_call_args[0][0] == "file" # First arg should be "file" namespace mock_cache.put.assert_called_once() + put_call_args = mock_cache.put.call_args + assert put_call_args[0][0] == "file" # First arg should be "file" namespace assert "new_query" in loader._queries @@ -318,7 +329,7 @@ def test_namespace_handling_in_cache() -> None: with ( patch("sqlspec.loader.get_cache_config") as mock_config, - patch("sqlspec.loader.get_default_cache") as mock_cache_factory, + patch("sqlspec.loader.get_cache") as mock_cache_factory, ): mock_cache_config = Mock() mock_cache_config.compiled_cache_enabled = True @@ -334,7 +345,8 @@ def test_namespace_handling_in_cache() -> None: mock_cache.put.assert_called() cache_call_args = mock_cache.put.call_args[0] - cached_data = cache_call_args[1] + assert cache_call_args[0] == "file" # First arg should be "file" namespace + cached_data = cache_call_args[2] # 
Third arg is the value in MultiLevelCache.put assert isinstance(cached_data, CachedSQLFile) @@ -365,7 +377,7 @@ def test_cache_restoration_with_namespace() -> None: with ( patch("sqlspec.loader.get_cache_config") as mock_config, - patch("sqlspec.loader.get_default_cache") as mock_cache_factory, + patch("sqlspec.loader.get_cache") as mock_cache_factory, patch("sqlspec.loader.SQLFileLoader._is_file_unchanged", return_value=True), ): mock_cache_config = Mock() @@ -388,7 +400,7 @@ def test_cache_clear_integration() -> None: with ( patch("sqlspec.loader.get_cache_config") as mock_config, - patch("sqlspec.loader.get_default_cache") as mock_cache_factory, + patch("sqlspec.loader.get_cache") as mock_cache_factory, ): mock_cache_config = Mock() mock_cache_config.compiled_cache_enabled = True @@ -414,7 +426,7 @@ def test_file_cache_only_clear() -> None: with ( patch("sqlspec.loader.get_cache_config") as mock_config, - patch("sqlspec.loader.get_default_cache") as mock_cache_factory, + patch("sqlspec.loader.get_cache") as mock_cache_factory, ): mock_cache_config = Mock() mock_cache_config.compiled_cache_enabled = True @@ -462,7 +474,7 @@ def test_cache_sharing_between_loaders() -> None: with ( patch("sqlspec.loader.get_cache_config") as mock_config, - patch("sqlspec.loader.get_default_cache") as mock_cache_factory, + patch("sqlspec.loader.get_cache") as mock_cache_factory, ): mock_cache_config = Mock() mock_cache_config.compiled_cache_enabled = True @@ -581,7 +593,7 @@ def test_cache_hit_performance_benefit() -> None: with ( patch("sqlspec.loader.get_cache_config") as mock_config, - patch("sqlspec.loader.get_default_cache") as mock_cache_factory, + patch("sqlspec.loader.get_cache") as mock_cache_factory, ): mock_cache_config = Mock() mock_cache_config.compiled_cache_enabled = True diff --git a/tests/unit/test_loader/test_sql_file_loader.py b/tests/unit/test_loader/test_sql_file_loader.py index 07a9d2357..7168bdf74 100644 --- a/tests/unit/test_loader/test_sql_file_loader.py +++ b/tests/unit/test_loader/test_sql_file_loader.py @@ -1,3 +1,4 @@ +# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false """Unit tests for SQLFileLoader class. Tests for SQLFileLoader core functionality including: diff --git a/tests/unit/test_migrations/test_migration.py b/tests/unit/test_migrations/test_migration.py index 40c9c7122..66198f4bd 100644 --- a/tests/unit/test_migrations/test_migration.py +++ b/tests/unit/test_migrations/test_migration.py @@ -1,3 +1,5 @@ +# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false +# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false """Unit tests for Migration class functionality. Tests for Migration core functionality including: diff --git a/tests/unit/test_migrations/test_migration_commands.py b/tests/unit/test_migrations/test_migration_commands.py index a726bad5b..3adfc375b 100644 --- a/tests/unit/test_migrations/test_migration_commands.py +++ b/tests/unit/test_migrations/test_migration_commands.py @@ -1,3 +1,5 @@ +# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false +# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false """Unit tests for migration commands functionality. 
 Tests focused on MigrationCommands class behavior including:
diff --git a/tests/unit/test_migrations/test_migration_execution.py b/tests/unit/test_migrations/test_migration_execution.py
index cb79b3837..61887f9fe 100644
--- a/tests/unit/test_migrations/test_migration_execution.py
+++ b/tests/unit/test_migrations/test_migration_execution.py
@@ -1,3 +1,4 @@
+# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false
 """Unit tests for migration execution.
 
 Tests migration execution including:
diff --git a/tests/unit/test_migrations/test_migration_runner.py b/tests/unit/test_migrations/test_migration_runner.py
index 2bd325c68..a0370f4de 100644
--- a/tests/unit/test_migrations/test_migration_runner.py
+++ b/tests/unit/test_migrations/test_migration_runner.py
@@ -1,3 +1,4 @@
+# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false
 """Unit tests for MigrationRunner functionality.
 
 Tests for MigrationRunner core functionality including:
diff --git a/tests/unit/test_storage/test_local_store.py b/tests/unit/test_storage/test_local_store.py
index 56e6bd93f..2ce2465a6 100644
--- a/tests/unit/test_storage/test_local_store.py
+++ b/tests/unit/test_storage/test_local_store.py
@@ -1,3 +1,4 @@
+# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false
 """Unit tests for LocalStore backend."""
 
 import tempfile
diff --git a/tests/unit/test_storage/test_storage_registry.py b/tests/unit/test_storage/test_storage_registry.py
index 259db3a61..3f311c3ca 100644
--- a/tests/unit/test_storage/test_storage_registry.py
+++ b/tests/unit/test_storage/test_storage_registry.py
@@ -1,3 +1,4 @@
+# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false
 """Unit tests for StorageRegistry."""
 
 import tempfile
diff --git a/tests/unit/test_utils/test_fixtures.py b/tests/unit/test_utils/test_fixtures.py
index 864caf44c..0c4eeeadd 100644
--- a/tests/unit/test_utils/test_fixtures.py
+++ b/tests/unit/test_utils/test_fixtures.py
@@ -1,3 +1,4 @@
+# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false
 """Tests for sqlspec.utils.fixtures module.
 
 Tests fixture loading utilities including synchronous and asynchronous
diff --git a/tests/unit/test_utils/test_singleton.py b/tests/unit/test_utils/test_singleton.py
index 3df0ac98e..3dad2e49a 100644
--- a/tests/unit/test_utils/test_singleton.py
+++ b/tests/unit/test_utils/test_singleton.py
@@ -1,3 +1,4 @@
+# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false
 """Tests for sqlspec.utils.singleton module.
 
 Tests singleton pattern implementation using metaclass.
diff --git a/tests/unit/test_utils/test_sync_tools.py b/tests/unit/test_utils/test_sync_tools.py
index dfac1df09..6a5c5070b 100644
--- a/tests/unit/test_utils/test_sync_tools.py
+++ b/tests/unit/test_utils/test_sync_tools.py
@@ -1,3 +1,4 @@
+# pyright: reportPrivateImportUsage = false, reportPrivateUsage = false
 """Tests for sqlspec.utils.sync_tools module.
Tests synchronization tools including async/sync conversion utilities, diff --git a/tests/unit/test_utils/test_type_guards.py b/tests/unit/test_utils/test_type_guards.py index 89deed84b..e5a258453 100644 --- a/tests/unit/test_utils/test_type_guards.py +++ b/tests/unit/test_utils/test_type_guards.py @@ -5,13 +5,11 @@ """ from dataclasses import dataclass -from typing import TYPE_CHECKING, Any, Optional, cast +from typing import Any, Optional, cast import msgspec import pytest - -if TYPE_CHECKING: - from sqlglot import exp +from sqlglot import exp from sqlspec.utils.type_guards import ( dataclass_to_dict, @@ -64,6 +62,8 @@ pytestmark = pytest.mark.xdist_group("utils") +_UNSET = object() + @dataclass class SampleDataclass: @@ -75,23 +75,36 @@ class SampleDataclass: class MockSQLGlotExpression: - """Mock SQLGlot expression for testing.""" + """Mock SQLGlot expression for testing type guard functions. + + This mock allows us to test cases where attributes don't exist, + which is needed to test the AttributeError handling in type guards. + """ def __init__( self, - this: "Optional[Any]" = None, - expressions: "Optional[list[Any]]" = None, - parent: "Optional[Any]" = None, + this: Any = _UNSET, + expressions: Any = _UNSET, + parent: Any = _UNSET, args: "Optional[dict[str, Any]]" = None, ) -> None: - if this is not None: + # Only set attributes if they were explicitly provided + if this is not _UNSET: self.this = this - if expressions is not None: + if expressions is not _UNSET: self.expressions = expressions - if parent is not None: + if parent is not _UNSET: self.parent = parent + + # SQLGlot expressions always have an args dict self.args = args or {} + # Set any additional attributes from args + if args: + for key, value in args.items(): + if key not in {"this", "expressions", "parent"}: + setattr(self, key, value) + class MockLiteral: """Mock literal for testing.""" @@ -611,7 +624,7 @@ def __init__(self) -> None: self.initial_expression = mock_expr context = MockContext() - assert get_initial_expression(context) is mock_expr + assert get_initial_expression(context) is mock_expr # type: ignore[comparison-overlap] def test_get_initial_expression_without_attribute() -> None: diff --git a/uv.lock b/uv.lock index a037de03a..66dfa55f5 100644 --- a/uv.lock +++ b/uv.lock @@ -12,110 +12,114 @@ resolution-markers = [ [[package]] name = "adbc-driver-bigquery" -version = "1.7.0" +version = "1.8.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "adbc-driver-manager" }, { name = "importlib-resources" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d8/57/614aae90b81995766b5257f4e931c3b8622456cfcac3573c6f6fd05214c5/adbc_driver_bigquery-1.7.0.tar.gz", hash = "sha256:41869135374d6d21d8437f9f5850ad1c420a41a9dc9ae70cfb3e70d65505899e", size = 19259, upload-time = "2025-07-07T06:23:07.37Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d0/1c/fd4e1c9bc4d15a284a59832233df9bcc86cde017c1c75d21f8c921830d07/adbc_driver_bigquery-1.8.0.tar.gz", hash = "sha256:0b55e857a8fd470bfd8890dd882d0e32d31102ba5b5f6c840e9214326926b686", size = 19228, upload-time = "2025-09-12T12:31:22.413Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/eb/d8/6f97f74582af9cef89614ddd8ef8053c953e40359190834c1c098b54886a/adbc_driver_bigquery-1.7.0-py3-none-macosx_10_15_x86_64.whl", hash = "sha256:23209198ed92088e3dc8929f01b862b8c155e1c3e5887cf682893b0902f825e6", size = 9418295, upload-time = "2025-07-07T06:21:37.471Z" }, - { url = 
"https://files.pythonhosted.org/packages/70/eb/b16286208c9189158b460a81fd39090533510450ffc9070e820cd57d2028/adbc_driver_bigquery-1.7.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:6e5b8ac4c09b9bcc0bd5315eb94ec6768c88a3a74a725b597dedba6516222e76", size = 8897027, upload-time = "2025-07-07T06:21:40.114Z" }, - { url = "https://files.pythonhosted.org/packages/1e/94/5211a8ea70793be1a9871f8c54317a7e250108b161d6cab921b9f4ca2a42/adbc_driver_bigquery-1.7.0-py3-none-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a1d6a95b760fffe46cdf078d4e23dcb519a7eb2e7d13a8805fd4e2d2f0a6dd28", size = 9443348, upload-time = "2025-07-07T06:21:42.533Z" }, - { url = "https://files.pythonhosted.org/packages/59/bc/06117ddbe4ea3ecb49904d1a79513b3c2755a6eb906ec07919d199c93be8/adbc_driver_bigquery-1.7.0-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:458f2f51721b638d98f1883c3bfcb18d5a83c26882bab0a37331628248f3b4eb", size = 8681765, upload-time = "2025-07-07T06:21:44.712Z" }, - { url = "https://files.pythonhosted.org/packages/cd/f6/0432f7dc0aa4d1c6207578db9154850055e0696108d707c8591b31b56f9d/adbc_driver_bigquery-1.7.0-py3-none-win_amd64.whl", hash = "sha256:119240f8346d86035e0b08285a608f7b89a65c92e599e58342e156fe1e59b079", size = 17530223, upload-time = "2025-07-07T06:21:47.886Z" }, + { url = "https://files.pythonhosted.org/packages/8b/61/d3305955169cafcfd918437a73de497d6636d14475d162442ae69e3f45fa/adbc_driver_bigquery-1.8.0-py3-none-macosx_10_15_x86_64.whl", hash = "sha256:6d13ac05b71999cd7d5cc9bff22cbd0469e13665e7a404bcfc534096c2fa27b9", size = 9490322, upload-time = "2025-09-12T12:29:04.824Z" }, + { url = "https://files.pythonhosted.org/packages/aa/bb/1a66ef3c40091b2b7f2289a5573b1a23f0fb0769f2b2e283272d43349690/adbc_driver_bigquery-1.8.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:59b64ad4390c8d8d94321dbf1d1c3a460b23597cf397ba9d65bcfb2edecd8062", size = 8961861, upload-time = "2025-09-12T12:29:09.258Z" }, + { url = "https://files.pythonhosted.org/packages/aa/e0/831606b509df1028fcac9abe56b36201e50e93b600b4f3512c77a1beae7e/adbc_driver_bigquery-1.8.0-py3-none-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8760955803ed12097ce88a33c2d8d94e75d65e4ef8f695003b80d4e61275a269", size = 9516364, upload-time = "2025-09-12T12:29:14.252Z" }, + { url = "https://files.pythonhosted.org/packages/4f/30/f71012a91f75f39f4bc88c6cc4552073df092d07af0eb35ac4dc1a899016/adbc_driver_bigquery-1.8.0-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a5908d2d32d6a6fe626900ba5d5fa2757f43d3223ead12d21c73162be1445fda", size = 8746559, upload-time = "2025-09-12T12:29:18.71Z" }, + { url = "https://files.pythonhosted.org/packages/5e/a2/6f2ad307b3fc6d2c315405025a8aa2de21579e54afd48bcc2fced720b478/adbc_driver_bigquery-1.8.0-py3-none-win_amd64.whl", hash = "sha256:add664b7998a83fffa334e2c92f504d0c6921d5f9e420d351d880da80646ce03", size = 17658500, upload-time = "2025-09-12T12:29:22.847Z" }, ] [[package]] name = "adbc-driver-flightsql" -version = "1.7.0" +version = "1.8.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "adbc-driver-manager" }, { name = "importlib-resources" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b8/d4/ebd3eed981c771565677084474cdf465141455b5deb1ca409c616609bfd7/adbc_driver_flightsql-1.7.0.tar.gz", hash = "sha256:5dca460a2c66e45b29208eaf41a7206f252177435fa48b16f19833b12586f7a0", size = 21247, upload-time = 
"2025-07-07T06:23:08.186Z" } +sdist = { url = "https://files.pythonhosted.org/packages/22/c7/8646301ac48142cd9c437c9ee56aaaf15f39bee41c80dba5f7d882f2d48f/adbc_driver_flightsql-1.8.0.tar.gz", hash = "sha256:5ca2c4928221ab2779a7be601375e96b9204a009ab1d1f91a862e1d860f918a6", size = 21221, upload-time = "2025-09-12T12:31:23.125Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/36/20/807fca9d904b7e0d3020439828d6410db7fd7fd635824a80cab113d9fad1/adbc_driver_flightsql-1.7.0-py3-none-macosx_10_15_x86_64.whl", hash = "sha256:a5658f9bc3676bd122b26138e9b9ce56b8bf37387efe157b4c66d56f942361c6", size = 7749664, upload-time = "2025-07-07T06:21:50.742Z" }, - { url = "https://files.pythonhosted.org/packages/cd/e6/9e50f6497819c911b9cc1962ffde610b60f7d8e951d6bb3fa145dcfb50a7/adbc_driver_flightsql-1.7.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:65e21df86b454d8db422c8ee22db31be217d88c42d9d6dd89119f06813037c91", size = 7302476, upload-time = "2025-07-07T06:21:52.441Z" }, - { url = "https://files.pythonhosted.org/packages/27/82/e51af85e7cc8c87bc8ce4fae8ca7ee1d3cf39c926be0aeab789cedc93f0a/adbc_driver_flightsql-1.7.0-py3-none-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3282fdc7b73c712780cc777975288c88b1e3a555355bbe09df101aa954f8f105", size = 7686056, upload-time = "2025-07-07T06:21:54.101Z" }, - { url = "https://files.pythonhosted.org/packages/8b/c9/591c8ecbaf010ba3f4b360db602050ee5880cd077a573c9e90fcb270ab71/adbc_driver_flightsql-1.7.0-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e0c5737ae6ee3bbfba44dcbc28ba1ff8cf3ab6521888c4b0f10dd6a482482161", size = 7050275, upload-time = "2025-07-07T06:21:56.179Z" }, - { url = "https://files.pythonhosted.org/packages/10/14/f339e9a5d8dbb3e3040215514cea9cca0a58640964aaccc6532f18003a03/adbc_driver_flightsql-1.7.0-py3-none-win_amd64.whl", hash = "sha256:f8b5290b322304b7d944ca823754e6354c1868dbbe94ddf84236f3e0329545da", size = 14312858, upload-time = "2025-07-07T06:21:58.165Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3d/862f1d3717462700517e44cda0e486b9614d4131e978b437ea276523e020/adbc_driver_flightsql-1.8.0-py3-none-macosx_10_15_x86_64.whl", hash = "sha256:81f2a2764f7abfee3f50153ee15979ab8d1fb288c521984f1c286a70bf4712a9", size = 7807606, upload-time = "2025-09-12T12:29:26.227Z" }, + { url = "https://files.pythonhosted.org/packages/25/cc/5ac43f1690d29e18b2763c2b0ec7553f0b986bba820ca7beda103838702c/adbc_driver_flightsql-1.8.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e688e1292aaa56fd1508453eb826d53d8ea21668af503c0cb0988cf1cbc83015", size = 7358553, upload-time = "2025-09-12T12:29:29.017Z" }, + { url = "https://files.pythonhosted.org/packages/6c/a4/c2aedeb081e44771f5be24720636dd36483ba325055cd2196e051b366907/adbc_driver_flightsql-1.8.0-py3-none-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:632408dae8e2dc24028982936937f1db39afff45b33840e7e8787d8878549756", size = 7745209, upload-time = "2025-09-12T12:29:31.858Z" }, + { url = "https://files.pythonhosted.org/packages/46/92/875210dcbd33bdfd0607e8253a23b05cc89afcc03a230347c6e344e2894c/adbc_driver_flightsql-1.8.0-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:685fc873826fe30ea8e29e94d8868938ad31df48b781bdc44adf42e176fa36ad", size = 7107135, upload-time = "2025-09-12T12:29:34.337Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/d3/740c90e01fa659c630f8c011464cd5ba86299bf06e54fa03979ecc1967b3/adbc_driver_flightsql-1.8.0-py3-none-win_amd64.whl", hash = "sha256:7eaa25ade42aa2cedd6c261c71c7d141857b91020d8bddf08e64c9f36541cc29", size = 14428790, upload-time = "2025-09-12T12:29:37.362Z" }, ] [[package]] name = "adbc-driver-manager" -version = "1.7.0" +version = "1.8.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bb/bf/2986a2cd3e1af658d2597f7e2308564e5c11e036f9736d5c256f1e00d578/adbc_driver_manager-1.7.0.tar.gz", hash = "sha256:e3edc5d77634b5925adf6eb4fbcd01676b54acb2f5b1d6864b6a97c6a899591a", size = 198128, upload-time = "2025-07-07T06:23:08.913Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/24/38/2c2e0b4dd406ba90802c132a03b169ba4d016d1f524b44ee250d500af4d6/adbc_driver_manager-1.7.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a90d7bd45ff021821c556c34ac3e98bf38a4a8f463c6823215cdf0c044c8d324", size = 519893, upload-time = "2025-07-07T06:22:00.311Z" }, - { url = "https://files.pythonhosted.org/packages/64/0f/1173abfd48bd387d23f7dc7d5766ef553ae41ffb3e39b164d553c7266350/adbc_driver_manager-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f49003e56eaad48c30bb49da97d50a746b610a90a21252ae4f4c48ec0ccc9b49", size = 506039, upload-time = "2025-07-07T06:22:01.922Z" }, - { url = "https://files.pythonhosted.org/packages/ad/a0/d928ba5fa41ecd955ca0e4a9537d0a70217a08be436ea864b464f12e4c49/adbc_driver_manager-1.7.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e868c188bd755e924ed2496d5f4ddede26945939c20b6f9dd964de823fcb7767", size = 2911082, upload-time = "2025-07-07T06:22:03.501Z" }, - { url = "https://files.pythonhosted.org/packages/a1/eb/8a0f39a685496eeea829794a8e6045b6c3e67139a0dff23752037df46b10/adbc_driver_manager-1.7.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:160654d58240e32a0fd6906acf619623e74b1120a7842e9cfb8c3996e9a7d3f2", size = 2924944, upload-time = "2025-07-07T06:22:04.869Z" }, - { url = "https://files.pythonhosted.org/packages/e0/9c/a9f68675a04139d482bcb80a816966ca2ee69204574e041c935ce13e01b2/adbc_driver_manager-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:27b45a97fbfce81bd0621d20d337fbb08fe9358928ba1d13dc760f4efa463109", size = 696641, upload-time = "2025-07-07T06:22:06.151Z" }, - { url = "https://files.pythonhosted.org/packages/5d/e0/197fee9a9c35bb1f44d91cebcac8991716ece61c432d6c89d909cf57a9bd/adbc_driver_manager-1.7.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:bc6aac15a980b2849d5121f1c3aab3b8ef51a8b1ab1865872b0decc278ca2aea", size = 524489, upload-time = "2025-07-07T06:22:07.287Z" }, - { url = "https://files.pythonhosted.org/packages/45/07/f5061c0852e73f796d422fa6366f9d2384246ff2eab660b45287f4389961/adbc_driver_manager-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26b4a0c8e243d9558a73afc4fa83e62aa79f3873401c3d74028a30d4989f2dbb", size = 511071, upload-time = "2025-07-07T06:22:08.403Z" }, - { url = "https://files.pythonhosted.org/packages/59/d4/468c8027c5de2d7d6b46ba52762df83ed62726014347a17ca27502eaf317/adbc_driver_manager-1.7.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:44f0e424d450c7c5f9175788b87a1277680f5a1bee35706de72d5a74b27e773e", size = 2988591, upload-time = "2025-07-07T06:22:09.582Z" }, - { url = 
"https://files.pythonhosted.org/packages/da/47/eec4738b9a427258d29a4499b5c38266d68c8a4d638ee809ab2857f8f159/adbc_driver_manager-1.7.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:886707c162950356deff644f1dc492ad438dea1b661c7024861fc3511e59e182", size = 2996720, upload-time = "2025-07-07T06:22:11.318Z" }, - { url = "https://files.pythonhosted.org/packages/95/bb/59987660a3f3eac23f65844a37568fdd435e8eddb474f1adbfe1f19491ad/adbc_driver_manager-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:b6e856f39852270d4a90f1b21ed6504e2f56b049f9b201b3fb6bf33b939e2b56", size = 698428, upload-time = "2025-07-07T06:22:12.803Z" }, - { url = "https://files.pythonhosted.org/packages/74/3a/72bd9c45d55f1f5f4c549e206de8cfe3313b31f7b95fbcb180da05c81044/adbc_driver_manager-1.7.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:8da1ac4c19bcbf30b3bd54247ec889dfacc9b44147c70b4da79efe2e9ba93600", size = 524210, upload-time = "2025-07-07T06:22:13.927Z" }, - { url = "https://files.pythonhosted.org/packages/33/29/e1a8d8dde713a287f8021f3207127f133ddce578711a4575218bdf78ef27/adbc_driver_manager-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:408bc23bad1a6823b364e2388f85f96545e82c3b2db97d7828a4b94839d3f29e", size = 505902, upload-time = "2025-07-07T06:22:15.071Z" }, - { url = "https://files.pythonhosted.org/packages/59/00/773ece64a58c0ade797ab4577e7cdc4c71ebf800b86d2d5637e3bfe605e9/adbc_driver_manager-1.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cf38294320c23e47ed3455348e910031ad8289c3f9167ae35519ac957b7add01", size = 2974883, upload-time = "2025-07-07T06:22:16.358Z" }, - { url = "https://files.pythonhosted.org/packages/7c/ad/1568da6ae9ab70983f1438503d3906c6b1355601230e891d16e272376a04/adbc_driver_manager-1.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:689f91b62c18a9f86f892f112786fb157cacc4729b4d81666db4ca778eade2a8", size = 2997781, upload-time = "2025-07-07T06:22:17.767Z" }, - { url = "https://files.pythonhosted.org/packages/19/66/2b6ea5afded25a3fa009873c2bbebcd9283910877cc10b9453d680c00b9a/adbc_driver_manager-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:f936cfc8d098898a47ef60396bd7a73926ec3068f2d6d92a2be4e56e4aaf3770", size = 690041, upload-time = "2025-07-07T06:22:20.384Z" }, - { url = "https://files.pythonhosted.org/packages/b2/3b/91154c83a98f103a3d97c9e2cb838c3842aef84ca4f4b219164b182d9516/adbc_driver_manager-1.7.0-cp313-cp313-macosx_10_15_x86_64.whl", hash = "sha256:ab9ee36683fd54f61b0db0f4a96f70fe1932223e61df9329290370b145abb0a9", size = 522737, upload-time = "2025-07-07T06:22:21.505Z" }, - { url = "https://files.pythonhosted.org/packages/9c/52/4bc80c3388d5e2a3b6e504ba9656dd9eb3d8dbe822d07af38db1b8c96fb1/adbc_driver_manager-1.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4ec03d94177f71a8d3a149709f4111e021f9950229b35c0a803aadb1a1855a4b", size = 503896, upload-time = "2025-07-07T06:22:22.629Z" }, - { url = "https://files.pythonhosted.org/packages/e1/f3/46052ca11224f661cef4721e19138bc73e750ba6aea54f22606950491606/adbc_driver_manager-1.7.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:700c79dac08a620018c912ede45a6dc7851819bc569a53073ab652dc0bd0c92f", size = 2972586, upload-time = "2025-07-07T06:22:23.835Z" }, - { url = "https://files.pythonhosted.org/packages/a2/22/44738b41bb5ca30f94b5f4c00c71c20be86d7eb4ddc389d4cf3c7b8b69ef/adbc_driver_manager-1.7.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:98db0f5d0aa1635475f63700a7b6f677390beb59c69c7ba9d388bc8ce3779388", size = 2992001, upload-time = "2025-07-07T06:22:25.156Z" }, - { url = "https://files.pythonhosted.org/packages/1b/2b/5184fe5a529feb019582cc90d0f65e0021d52c34ca20620551532340645a/adbc_driver_manager-1.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:4b7e5e9a163acb21804647cc7894501df51cdcd780ead770557112a26ca01ca6", size = 688789, upload-time = "2025-07-07T06:22:26.591Z" }, - { url = "https://files.pythonhosted.org/packages/3f/e0/b283544e1bb7864bf5a5ac9cd330f111009eff9180ec5000420510cf9342/adbc_driver_manager-1.7.0-cp313-cp313t-macosx_10_15_x86_64.whl", hash = "sha256:ac83717965b83367a8ad6c0536603acdcfa66e0592d783f8940f55fda47d963e", size = 538625, upload-time = "2025-07-07T06:22:27.751Z" }, - { url = "https://files.pythonhosted.org/packages/77/5a/dc244264bd8d0c331a418d2bdda5cb6e26c30493ff075d706aa81d4e3b30/adbc_driver_manager-1.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4c234cf81b00eaf7e7c65dbd0f0ddf7bdae93dfcf41e9d8543f9ecf4b10590f6", size = 523627, upload-time = "2025-07-07T06:22:29.186Z" }, - { url = "https://files.pythonhosted.org/packages/e9/ff/a499a00367fd092edb20dc6e36c81e3c7a437671c70481cae97f46c8156a/adbc_driver_manager-1.7.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ad8aa4b039cc50722a700b544773388c6b1dea955781a01f79cd35d0a1e6edbf", size = 3037517, upload-time = "2025-07-07T06:22:30.391Z" }, - { url = "https://files.pythonhosted.org/packages/25/6e/9dfdb113294dcb24b4f53924cd4a9c9af3fbe45a9790c1327048df731246/adbc_driver_manager-1.7.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4409ff53578e01842a8f57787ebfbfee790c1da01a6bd57fcb7701ed5d4dd4f7", size = 3016543, upload-time = "2025-07-07T06:22:31.914Z" }, - { url = "https://files.pythonhosted.org/packages/01/7e/9fa1f66da19df2b2fcdc5ff62fabc9abc0d5c6433a1f30cc4435d968be91/adbc_driver_manager-1.7.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:956a1e882871e65393de65e2b0f73557fe4673d178ce78a4916daf692b18d38f", size = 521715, upload-time = "2025-07-07T06:22:33.239Z" }, - { url = "https://files.pythonhosted.org/packages/2a/69/03a57826224d6a3ca7fbc8fa85070952d29833a741f9f1c95ed8952e4901/adbc_driver_manager-1.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b23791c3841e1f9f4477306561d46cb5e65c014146debb2ec8c84316bbf9c45f", size = 507821, upload-time = "2025-07-07T06:22:34.36Z" }, - { url = "https://files.pythonhosted.org/packages/4a/96/67b616981f6de21b962815b54cf115b400283fdcf179a834beaf3ae3095c/adbc_driver_manager-1.7.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e1cf8c03b943534af2d95fd2127c072cbacfb5dbed1d01c9ae9189576b2e9b6", size = 2907402, upload-time = "2025-07-07T06:22:35.483Z" }, - { url = "https://files.pythonhosted.org/packages/09/64/5f1d23d622d7cbea6484647fb4048b92cff3ed5413e7b11c5c5ed09f03b2/adbc_driver_manager-1.7.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a7b5b3ae67838155aaf7ce5df247a847236bafcadfc9642efb4e63238d730385", size = 2921491, upload-time = "2025-07-07T06:22:37.238Z" }, - { url = "https://files.pythonhosted.org/packages/f8/68/76a3691e0a7d1d2a698ceb1b007bf780b2d42ec082eb1e4737566ec72434/adbc_driver_manager-1.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:bb11e0af7844e344a117626664def42ac5a2a94f82296f9a3f4d01ac14545052", size = 698860, upload-time = "2025-07-07T06:22:38.508Z" }, +sdist = { url = 
"https://files.pythonhosted.org/packages/55/2a/00fe4974b7d134c8d0691a87f09460d949e607e1ef65a022c665e8bde64f/adbc_driver_manager-1.8.0.tar.gz", hash = "sha256:88ca0f4d8c02fc6859629acaf0504620da17a39549e64d4098a3497f7f1eb2d0", size = 203568, upload-time = "2025-09-12T12:31:24.233Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/00/5c30fbb6c218599b9d6ee29df6e999c144f792b5790da31a23d6513bde83/adbc_driver_manager-1.8.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:fe3a1beb0f603468e3c4e7c03fccab1af584b6b606ab9707a168d17b7bab01a7", size = 533919, upload-time = "2025-09-12T12:29:40.317Z" }, + { url = "https://files.pythonhosted.org/packages/af/cc/6a0bb6c858ee8316d510b1c9d184cd348b98c4cffd212e79072bf44dd436/adbc_driver_manager-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a9bba93fe8bba7f8c23ad2db0e1441fcd9672f3d900c2791437ee8058bfa6a70", size = 511549, upload-time = "2025-09-12T12:29:42.263Z" }, + { url = "https://files.pythonhosted.org/packages/91/61/742daad0325a1ad97602bc12a5dadb15ac73e7b7db20f2caf0a66e87ef45/adbc_driver_manager-1.8.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:18ce935cc2220b3df065dd98b049beec1c9abacd79ed6f7dfea953d9c3e9404b", size = 3023642, upload-time = "2025-09-12T12:29:44.874Z" }, + { url = "https://files.pythonhosted.org/packages/e9/d8/02f5ce9da49961f97c3ee184f42feb8f9bf5e77c80cacc3fe42a81b11325/adbc_driver_manager-1.8.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c01c66c37e3e97d8891bb217f2d2f6c33c6cd25bf799aefcb42ed99c76a6ed36", size = 3039802, upload-time = "2025-09-12T12:29:46.576Z" }, + { url = "https://files.pythonhosted.org/packages/07/8b/affdc2ab3baf6c68b7642e0246861b1db01a28cc33245ddf2ea26dbff7cb/adbc_driver_manager-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:66c7d1319c78fc66f09532f21bc9baf0435a787f1db17b99c46c9a820b9c9253", size = 710628, upload-time = "2025-09-12T12:29:47.735Z" }, + { url = "https://files.pythonhosted.org/packages/4d/0c/2bb08c26a551aae886289fab8ab6d1bf03f4bef5b74632123500a2bc6662/adbc_driver_manager-1.8.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:58c10f81134bf8a528fab3848ac14447f3fe158d9fbc84197e79a24827f94f2a", size = 537727, upload-time = "2025-09-12T12:29:50.082Z" }, + { url = "https://files.pythonhosted.org/packages/a9/67/f2e1694875ccbc72c15c334e1ef2f4338b4cb098ba217f4e535d92d5d2f7/adbc_driver_manager-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f59794ae27eef7a17be5583d46b746749b3cbae5e58b0fe0f44746e8498d6f5c", size = 516680, upload-time = "2025-09-12T12:29:52.51Z" }, + { url = "https://files.pythonhosted.org/packages/f5/7d/65a41108cb3c1a87e570cf80a50ca94521f748a58780a41d61ea1d946051/adbc_driver_manager-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fed9a2cb46602cff67f503bbf55c6ee2e69a7e5c07a08514b5bd27a656a3e40b", size = 3103357, upload-time = "2025-09-12T12:29:55.226Z" }, + { url = "https://files.pythonhosted.org/packages/43/15/6e22524aadc7ea82c0868492cdf7e28ab30b476edd5d3d6ef29a882775ec/adbc_driver_manager-1.8.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:349fecd931e5211f00ce00d109fc80a484046fe41644aa402b97496919aa8c2a", size = 3113074, upload-time = "2025-09-12T12:29:57.453Z" }, + { url = "https://files.pythonhosted.org/packages/ca/a1/05f66007556623a7fb37af6535fe19377d2f4757bf0c94f64f350521c9dc/adbc_driver_manager-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:92105ae34a42603c7f64b4b0f2d851380c018e9c9f4e9a764a01b1b6f1fa6156", size = 
712252, upload-time = "2025-09-12T12:29:59.162Z" }, + { url = "https://files.pythonhosted.org/packages/19/c7/05b5559eff9a42c53c47d86e32aa0b15bd206ef4be04f3a678da7871a8dd/adbc_driver_manager-1.8.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:0e6bbe0b026a17c69c1e7410a8df2366bb80803be0f0d8a7eed2defbed313a65", size = 537879, upload-time = "2025-09-12T12:30:00.798Z" }, + { url = "https://files.pythonhosted.org/packages/25/f0/d7ed70a28933e2c6b95455306c005d9022fc558e26e759ed65fce0537b79/adbc_driver_manager-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e5f0f89d13b8f86dc20522988caceab37085fe155ebbea4e9013a7962170011c", size = 512702, upload-time = "2025-09-12T12:30:02.543Z" }, + { url = "https://files.pythonhosted.org/packages/37/a6/fc66e7b72857589ba5cdd0dcfc388ea746ed805caf4031580b1c065481fa/adbc_driver_manager-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:abd11c6ecdc8119641d2a929e50c9f6ff822b322859bf08a085e7ba9d1adb399", size = 3086175, upload-time = "2025-09-12T12:30:04.491Z" }, + { url = "https://files.pythonhosted.org/packages/e7/90/4780e8cab75f11644d260a73307445254288405352a99cfb3b2889c50e80/adbc_driver_manager-1.8.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f7689b0cf30d77532189b30762e3f6a347275e57e511e885f0eba45ce40ce02c", size = 3113622, upload-time = "2025-09-12T12:30:06.665Z" }, + { url = "https://files.pythonhosted.org/packages/c5/b4/ed76afa37c344395a33d1f894dcd82b5cee2281925c235405a9078d10a29/adbc_driver_manager-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:3f0454ec6fc2b5d3c3629b504ee65dbded2516412647070e26cdc9c14341ac74", size = 703323, upload-time = "2025-09-12T12:30:07.984Z" }, + { url = "https://files.pythonhosted.org/packages/56/79/76d505f43c6195920a41f812192bbd5fb1a490ade1c81fe5ba9f07a86f23/adbc_driver_manager-1.8.0-cp313-cp313-macosx_10_15_x86_64.whl", hash = "sha256:04e0676f7bd16dd7d7c403f506b7a22a542fe89f4471526c82cfd546353b125f", size = 536549, upload-time = "2025-09-12T12:30:09.513Z" }, + { url = "https://files.pythonhosted.org/packages/9f/1b/61e9badd21f0936a43692275f84dbf4baa4f39d4100042a14edbf9654a4d/adbc_driver_manager-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6dddf0ae5b8d636015b1f7fc6972167c1824bd950f3ed6a178d083e89dfd322a", size = 510497, upload-time = "2025-09-12T12:30:10.837Z" }, + { url = "https://files.pythonhosted.org/packages/9c/52/501e0d11b2ba9fca1eb2698cb56ff14c94e8a1cad421a9c90c2e23edfbd8/adbc_driver_manager-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d70431e659e8e51d222fa238410085f0c13921154e0a17e9a687f7896667138f", size = 3085322, upload-time = "2025-09-12T12:30:12.893Z" }, + { url = "https://files.pythonhosted.org/packages/38/5e/0a79d48fe44cc8387221fff44dfa956c5ce6131a72f08e393748cbb090e0/adbc_driver_manager-1.8.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8b4d34618a5e64e678210dfdf76704f11e09529fc221dbd576ead6c14555883d", size = 3107704, upload-time = "2025-09-12T12:30:14.861Z" }, + { url = "https://files.pythonhosted.org/packages/71/42/689194767d6ec09bb9b9216c27000ff193199c9bd7d7d5c6c5aad1bc2400/adbc_driver_manager-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:82da1442b6d786d2f87ac0f3dd0bbc7462ec90cb3316168a4db88044d470baa2", size = 702235, upload-time = "2025-09-12T12:30:24.469Z" }, + { url = "https://files.pythonhosted.org/packages/83/45/4e98be65dab4e61c9c0227c4908ab9a5db1db320eec8badfd5b253c5854b/adbc_driver_manager-1.8.0-cp313-cp313t-macosx_10_15_x86_64.whl", hash = 
"sha256:bc1677c06998361b5c3237d9f408b69fb23942f7157e2dd4ce515f658a60d3d4", size = 551974, upload-time = "2025-09-12T12:30:16.782Z" }, + { url = "https://files.pythonhosted.org/packages/8f/4a/c4d83125e1dc0532006b3fd3c816a2c2956dedb881a89e0cb47f4eda1bcc/adbc_driver_manager-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:86cb394bdd3ac298761e0ff8ceab8ad9c2f6ce5650d7f4ac7c8609bc74876929", size = 529497, upload-time = "2025-09-12T12:30:18.756Z" }, + { url = "https://files.pythonhosted.org/packages/c7/6c/d1752ed66109fe1866d9aabe0f6a930b8443d8e62d17f333a38b97b37b85/adbc_driver_manager-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1a834f2f269285d1308aa97ae6000002acdb79d70733735f16b3c9918ca88c1f", size = 3148300, upload-time = "2025-09-12T12:30:21.301Z" }, + { url = "https://files.pythonhosted.org/packages/3d/59/971e28a01382590ead8352d83a2d77b1f8beb2c4cc1b59036e1b68fd59e1/adbc_driver_manager-1.8.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8fcf38cc4b993336f49b6d1e407d4741ed1ea898f58088314005f8da7daf47db", size = 3134384, upload-time = "2025-09-12T12:30:23.252Z" }, + { url = "https://files.pythonhosted.org/packages/54/4e/0f826b68d5e0d50f8b1207514d0d17bf60663b7d51efd21f3754b5885450/adbc_driver_manager-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f954783e306ff1e1602d8390e74e00357142c382bff22ab159e8f94a95c8cfcb", size = 3082317, upload-time = "2025-09-12T12:30:26.8Z" }, + { url = "https://files.pythonhosted.org/packages/da/bf/ce5efe35be83b652e4b6059cfff48b59d648560a9dc99caac8da0a3441cd/adbc_driver_manager-1.8.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61d5ec92af49a76345db1ae0a3890789797078b5b9948d550a47e8cfaa27cc19", size = 3089760, upload-time = "2025-09-12T12:30:28.772Z" }, + { url = "https://files.pythonhosted.org/packages/f2/b3/d3254595b61890da1dc6d44178abe10262136d20aeffae4a86d3e289371e/adbc_driver_manager-1.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4f68df12cfbffaf4bec832ed406fb6ce978fd7dba8a4e8e377c9658fcd83b6a3", size = 3147028, upload-time = "2025-09-12T12:30:30.53Z" }, + { url = "https://files.pythonhosted.org/packages/68/ba/82d1f9521bc755d8d0d66eaac47032e147c2fe850eb308ba613710b27493/adbc_driver_manager-1.8.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a4402633d548e3ecdcf89a7133fd72b88a807a3c438e13bdb61ccc79d6239a65", size = 3133693, upload-time = "2025-09-12T12:30:32.357Z" }, + { url = "https://files.pythonhosted.org/packages/a5/33/5016dffbf2bdfcf181c17db5cae0f9fb4bee34605c87d1a3894e8963f888/adbc_driver_manager-1.8.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:151e21b46dedbbd48be4c7d904efd08fcdce3c1db7faff1ce32c520f3a4ed508", size = 535678, upload-time = "2025-09-12T12:30:33.87Z" }, + { url = "https://files.pythonhosted.org/packages/41/08/d089492c2df0d66f87c16a4223f98cd9e04571c55ba3d2147c25ef6f9d57/adbc_driver_manager-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a1c839a4b8c7a19d56bc0592596b123ecbdf6e76e28c7db28e562b6ce47f67cf", size = 512661, upload-time = "2025-09-12T12:30:35.604Z" }, + { url = "https://files.pythonhosted.org/packages/5c/56/5024e4da87544d4cf04df4c1f8231c9e91b9b818dd5fc208a5944455dafc/adbc_driver_manager-1.8.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eda25c53cec08290ba8c58f18dbec07ff21b0480e5e0641acc2410f79e477031", size = 3020784, upload-time = "2025-09-12T12:30:37.58Z" }, + { url = 
"https://files.pythonhosted.org/packages/66/22/d299a8a6aa0a51eecbe0c052aa457c24fbd499c9c096de889c40e7fb1a46/adbc_driver_manager-1.8.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c0d7fedaec1ecc1079c19eb0b55bd28e10f68f5c76fd523a37498588b7450ecf", size = 3037489, upload-time = "2025-09-12T12:30:39.838Z" }, + { url = "https://files.pythonhosted.org/packages/e3/37/ab055f5680f7b9dc2019303526f13c1db6a844d03fbaaa36cd36baa2348c/adbc_driver_manager-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:07188498dec41bd93753a2ad568dbca779e83f56a4e0339dbfc9cf75bc2e5f01", size = 712651, upload-time = "2025-09-12T12:30:41.658Z" }, ] [[package]] name = "adbc-driver-postgresql" -version = "1.7.0" +version = "1.8.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "adbc-driver-manager" }, { name = "importlib-resources" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/85/90/b70479b8808cc9fc9df3e26262a3197a38418477d6c729358db8f2a424ff/adbc_driver_postgresql-1.7.0.tar.gz", hash = "sha256:2c624446e855f12d3236211c33ffbd9d04b113e8879dd9fb64e8df52af760d36", size = 20366, upload-time = "2025-07-07T06:23:10.086Z" } +sdist = { url = "https://files.pythonhosted.org/packages/56/3a/3873d398f2df59bd1b20c803a24ef51068586554ea85ec8db6905f6ee639/adbc_driver_postgresql-1.8.0.tar.gz", hash = "sha256:66689c5616e41229c53ef222f63b60841f05b11610e60fb9029e54ac500e6d0d", size = 20306, upload-time = "2025-09-12T12:31:25.277Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5c/95/57ba30e2a1083427b52886d0df88e4f2475430a46526500fa797469991c6/adbc_driver_postgresql-1.7.0-py3-none-macosx_10_15_x86_64.whl", hash = "sha256:5ed0925aa60db1af83a3ac3b6dbf28301f7e958e32bc2fac38c88e87f037d216", size = 2690330, upload-time = "2025-07-07T06:22:40.016Z" }, - { url = "https://files.pythonhosted.org/packages/0a/94/e0885a8d81293a03bb827598eec2b6bd287910a5c80f6fdc97d60b8e33ee/adbc_driver_postgresql-1.7.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f90f3b09ff3515c7a7717cb1ff277d7b475c176d11ae7eb81b9a29a69a3822ae", size = 3003864, upload-time = "2025-07-07T06:22:41.532Z" }, - { url = "https://files.pythonhosted.org/packages/6c/38/76ae713aa626edef081c69c29b6be209e1d509e7979283a371013ba25f45/adbc_driver_postgresql-1.7.0-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6672a693b55c96a31927565bd77f055a8e7d85e60afd64e9c798a9091ebf8f84", size = 3195576, upload-time = "2025-07-07T06:22:43.084Z" }, - { url = "https://files.pythonhosted.org/packages/58/15/86561628738161017273d9a689e9405e4ea9a9d41a70fd2460dbc5d646ae/adbc_driver_postgresql-1.7.0-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:da257df9e168e05f3a13b9da974d58b5580c70dc881f9f100c80f789e0cb336b", size = 2852984, upload-time = "2025-07-07T06:22:44.49Z" }, - { url = "https://files.pythonhosted.org/packages/c5/56/30541cff717853151bb53c9b27602251795c22043c8b5c4615139b3228cb/adbc_driver_postgresql-1.7.0-py3-none-win_amd64.whl", hash = "sha256:db46e26dc0462d20a2508d5925dd9d22bfb248eb9982ed0be4ba45b90d7ebef6", size = 2860197, upload-time = "2025-07-07T06:22:45.936Z" }, + { url = "https://files.pythonhosted.org/packages/c7/e9/2c68074a173fdaa69028f170317144607e1c6bd26dd343e014b1935ffc12/adbc_driver_postgresql-1.8.0-py3-none-macosx_10_15_x86_64.whl", hash = "sha256:1f155941e8b7b75210f78a128758b5e12a45c370d462ea0da42e7763b1e3e84e", size = 2691625, upload-time = "2025-09-12T12:30:43.672Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/50/880b39754cf3b590e37f940dcfe45e72de18c8363fbc510fb22a26274e9c/adbc_driver_postgresql-1.8.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:22e11fe708303753e3bcac7798f4dc0f4a110db2b7447fddaf811b2d7af026ca", size = 3003079, upload-time = "2025-09-12T12:30:45.848Z" }, + { url = "https://files.pythonhosted.org/packages/c0/75/fe2923c934dea56a05e331469c60bcac4558e656ccd4f1b2ecc252297ca6/adbc_driver_postgresql-1.8.0-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bed9d730905fddd61712fcad3954ccb7342c83a7f81bc51265eb33b1b83c5b6c", size = 3196334, upload-time = "2025-09-12T12:30:47.925Z" }, + { url = "https://files.pythonhosted.org/packages/36/43/5bb16e9220b23a21692e60c9f036c0e79b4f78409109df6c72b4b4abc945/adbc_driver_postgresql-1.8.0-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ef2fb1f60ef0e4195ddae0b8d52a5dd7f31d2b7d29ca88db1a805736ff5fbd05", size = 2855368, upload-time = "2025-09-12T12:30:51.127Z" }, + { url = "https://files.pythonhosted.org/packages/7a/36/2383ecf8888a77108b4cee249ee105d303851f9a08356fcc66d43bfbbc7c/adbc_driver_postgresql-1.8.0-py3-none-win_amd64.whl", hash = "sha256:08b78dd96d72d3855eb967bd46a7ca5e4fbc0b75c2a9fea6281d95cc6e934a8f", size = 2975792, upload-time = "2025-09-12T12:30:53.118Z" }, ] [[package]] name = "adbc-driver-sqlite" -version = "1.7.0" +version = "1.8.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "adbc-driver-manager" }, { name = "importlib-resources" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/45/38/44291d3945b6a50bab8f581c08830e0c62bbffd010321f64ac2f339cba24/adbc_driver_sqlite-1.7.0.tar.gz", hash = "sha256:138869e6476d69444b68da6215e4ceca506ca635497e6bccb661f11daa8e4bf6", size = 18363, upload-time = "2025-07-07T06:23:11.563Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/5f/2a6f0b00672e20406532f3b9b0cd1ec4345af17eb9c3a1e496b02cc02c44/adbc_driver_sqlite-1.8.0.tar.gz", hash = "sha256:a48c40a2ba2e33b73df9f2b93ed375e72d71d754035574d0d194125fed39d98c", size = 18309, upload-time = "2025-09-12T12:31:27.833Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1b/af/102923d3eeb45d0dcfb570dec1760a495793feade885897495b05fd7db3c/adbc_driver_sqlite-1.7.0-py3-none-macosx_10_15_x86_64.whl", hash = "sha256:efec1bea04089ced1699b76b6b2f87e0df4dcb9a7fe51ab651fac18006483354", size = 1042451, upload-time = "2025-07-07T06:23:01.059Z" }, - { url = "https://files.pythonhosted.org/packages/8e/c0/6d5dc345f757e767d772e18120613118d74777773221b93318edb4fe0930/adbc_driver_sqlite-1.7.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:ae01974e5b98f7244ddc463504af15d9ff00a59dfb3984e27b4ba23647ee1a37", size = 1012753, upload-time = "2025-07-07T06:23:02.467Z" }, - { url = "https://files.pythonhosted.org/packages/f4/70/fde26a1562d87f8c1458dfc0a82181e914dd9fc3f1ca0d423c39f80136d6/adbc_driver_sqlite-1.7.0-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bdf5bc90d20b48f90627b500e075f38819816012881a888ad6e24d41f5a54ac3", size = 956900, upload-time = "2025-07-07T06:23:03.665Z" }, - { url = "https://files.pythonhosted.org/packages/93/1f/618d88542ca66baf6bc25a3e5ecbd698eff31b12b2ab2a590bae8d9d8c83/adbc_driver_sqlite-1.7.0-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2b3aba1b27ec9cc5761cfe4a870839a6e313e6f580f9f673fbec72299b76fa7d", size = 978150, upload-time = "2025-07-07T06:23:04.835Z" }, - { url = 
"https://files.pythonhosted.org/packages/b4/18/c857aecc1b80c02bb0b9af8464ef7c250caab2a0120a68f56b4501db32f6/adbc_driver_sqlite-1.7.0-py3-none-win_amd64.whl", hash = "sha256:d70f05a1d737ac477564e8810985101d6e8c6e632f790e396531ece8d3a93248", size = 867977, upload-time = "2025-07-07T06:23:06.155Z" }, + { url = "https://files.pythonhosted.org/packages/52/70/b40ce37ecae79ab74d5bcf62700d0abcd2ea57e3a2be41e5ca7b2af9ea6d/adbc_driver_sqlite-1.8.0-py3-none-macosx_10_15_x86_64.whl", hash = "sha256:fbfac5011e4d743044a95f0befbf2c2f3afc4c4fb61bb4184bf0e5a6e7362d74", size = 1043934, upload-time = "2025-09-12T12:31:14.218Z" }, + { url = "https://files.pythonhosted.org/packages/51/bb/14d27d8765f3aba2c84176beb00fe0f7415015b0f7b9cd64661048c53a93/adbc_driver_sqlite-1.8.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7ce28d08da6c34e5aaa43d85e1179c304c9d8d487c86d2dcabc6ef115f0b7937", size = 1010543, upload-time = "2025-09-12T12:31:16.07Z" }, + { url = "https://files.pythonhosted.org/packages/d5/3c/c318ca73c9398c00795d25a64e9fbc09146cd148b46ff7582fd95ceb1c48/adbc_driver_sqlite-1.8.0-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b3ca480ef8fc0721790d9ebe7706cb11dea28fbbf98c56ae6c6024da827829ba", size = 957091, upload-time = "2025-09-12T12:31:17.517Z" }, + { url = "https://files.pythonhosted.org/packages/15/18/0cfe03d8ae1ec6f33cc01d8533c8b0e8202b4174332d89efaf01208f5c48/adbc_driver_sqlite-1.8.0-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d808b5cc11ed02a731fdf3d76e14a588add17b6065745be6c26f4f5cd05a6a14", size = 980254, upload-time = "2025-09-12T12:31:19.229Z" }, + { url = "https://files.pythonhosted.org/packages/de/cc/52deb7f2a069fd0d2025ce264e738fcca3cc8b37d5b1cfb0905889c48950/adbc_driver_sqlite-1.8.0-py3-none-win_amd64.whl", hash = "sha256:44d4131d3ffb7ec8563ac82d8662f0d7431b748be44f19203105ea2d249e1d26", size = 955904, upload-time = "2025-09-12T12:31:20.995Z" }, ] [[package]] @@ -665,7 +669,7 @@ wheels = [ [[package]] name = "bump-my-version" -version = "1.2.1" +version = "1.2.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, @@ -679,9 +683,9 @@ dependencies = [ { name = "tomlkit" }, { name = "wcmatch" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d2/1c/2f26665d4be4f1b82b2dfe46f3bd7901582863ddf1bd597309b5d0a5e6d4/bump_my_version-1.2.1.tar.gz", hash = "sha256:96c48f880c149c299312f983d06b50e0277ffc566e64797bf3a6c240bce2dfcc", size = 1137281, upload-time = "2025-07-19T11:52:03.235Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ef/df/1bec1ba4fcdbd65825b018a9f6598ca03531eafb6d7ec978d08121d44c06/bump_my_version-1.2.2.tar.gz", hash = "sha256:76292bf9f827bf0c039f351a00f8aa74f5348cb796d0d7b2d7d59755f403093c", size = 1147090, upload-time = "2025-09-13T13:09:33.227Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5c/f4/40db87f649d9104c5fe69706cc455e24481b90024b2aacb64cc0ef205536/bump_my_version-1.2.1-py3-none-any.whl", hash = "sha256:ddb41d5f30abdccce9d2dc873e880bdf04ec8c7e7237c73a4c893aa10b7d7587", size = 59567, upload-time = "2025-07-19T11:52:01.343Z" }, + { url = "https://files.pythonhosted.org/packages/6f/ce/d92d04d91f13b41c8abc40f3f960bb1e6da9c97cf2c997f20ba9734e658c/bump_my_version-1.2.2-py3-none-any.whl", hash = "sha256:d8d2a2cddb2dae54f902f05b65f3fea6afd5e332218608360d7c92a4b9e51f57", size = 59543, upload-time = "2025-09-13T13:09:31.469Z" }, ] [[package]] @@ 
-2438,7 +2442,7 @@ wheels = [ [[package]] name = "mypy" -version = "1.17.1" +version = "1.18.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mypy-extensions" }, @@ -2446,45 +2450,45 @@ dependencies = [ { name = "tomli", marker = "python_full_version < '3.11'" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8e/22/ea637422dedf0bf36f3ef238eab4e455e2a0dcc3082b5cc067615347ab8e/mypy-1.17.1.tar.gz", hash = "sha256:25e01ec741ab5bb3eec8ba9cdb0f769230368a22c959c4937360efb89b7e9f01", size = 3352570, upload-time = "2025-07-31T07:54:19.204Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/77/a9/3d7aa83955617cdf02f94e50aab5c830d205cfa4320cf124ff64acce3a8e/mypy-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3fbe6d5555bf608c47203baa3e72dbc6ec9965b3d7c318aa9a4ca76f465bd972", size = 11003299, upload-time = "2025-07-31T07:54:06.425Z" }, - { url = "https://files.pythonhosted.org/packages/83/e8/72e62ff837dd5caaac2b4a5c07ce769c8e808a00a65e5d8f94ea9c6f20ab/mypy-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80ef5c058b7bce08c83cac668158cb7edea692e458d21098c7d3bce35a5d43e7", size = 10125451, upload-time = "2025-07-31T07:53:52.974Z" }, - { url = "https://files.pythonhosted.org/packages/7d/10/f3f3543f6448db11881776f26a0ed079865926b0c841818ee22de2c6bbab/mypy-1.17.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4a580f8a70c69e4a75587bd925d298434057fe2a428faaf927ffe6e4b9a98df", size = 11916211, upload-time = "2025-07-31T07:53:18.879Z" }, - { url = "https://files.pythonhosted.org/packages/06/bf/63e83ed551282d67bb3f7fea2cd5561b08d2bb6eb287c096539feb5ddbc5/mypy-1.17.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dd86bb649299f09d987a2eebb4d52d10603224500792e1bee18303bbcc1ce390", size = 12652687, upload-time = "2025-07-31T07:53:30.544Z" }, - { url = "https://files.pythonhosted.org/packages/69/66/68f2eeef11facf597143e85b694a161868b3b006a5fbad50e09ea117ef24/mypy-1.17.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a76906f26bd8d51ea9504966a9c25419f2e668f012e0bdf3da4ea1526c534d94", size = 12896322, upload-time = "2025-07-31T07:53:50.74Z" }, - { url = "https://files.pythonhosted.org/packages/a3/87/8e3e9c2c8bd0d7e071a89c71be28ad088aaecbadf0454f46a540bda7bca6/mypy-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:e79311f2d904ccb59787477b7bd5d26f3347789c06fcd7656fa500875290264b", size = 9507962, upload-time = "2025-07-31T07:53:08.431Z" }, - { url = "https://files.pythonhosted.org/packages/46/cf/eadc80c4e0a70db1c08921dcc220357ba8ab2faecb4392e3cebeb10edbfa/mypy-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad37544be07c5d7fba814eb370e006df58fed8ad1ef33ed1649cb1889ba6ff58", size = 10921009, upload-time = "2025-07-31T07:53:23.037Z" }, - { url = "https://files.pythonhosted.org/packages/5d/c1/c869d8c067829ad30d9bdae051046561552516cfb3a14f7f0347b7d973ee/mypy-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:064e2ff508e5464b4bd807a7c1625bc5047c5022b85c70f030680e18f37273a5", size = 10047482, upload-time = "2025-07-31T07:53:26.151Z" }, - { url = "https://files.pythonhosted.org/packages/98/b9/803672bab3fe03cee2e14786ca056efda4bb511ea02dadcedde6176d06d0/mypy-1.17.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70401bbabd2fa1aa7c43bb358f54037baf0586f41e83b0ae67dd0534fc64edfd", size = 11832883, upload-time = "2025-07-31T07:53:47.948Z" }, - { 
url = "https://files.pythonhosted.org/packages/88/fb/fcdac695beca66800918c18697b48833a9a6701de288452b6715a98cfee1/mypy-1.17.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e92bdc656b7757c438660f775f872a669b8ff374edc4d18277d86b63edba6b8b", size = 12566215, upload-time = "2025-07-31T07:54:04.031Z" }, - { url = "https://files.pythonhosted.org/packages/7f/37/a932da3d3dace99ee8eb2043b6ab03b6768c36eb29a02f98f46c18c0da0e/mypy-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c1fdf4abb29ed1cb091cf432979e162c208a5ac676ce35010373ff29247bcad5", size = 12751956, upload-time = "2025-07-31T07:53:36.263Z" }, - { url = "https://files.pythonhosted.org/packages/8c/cf/6438a429e0f2f5cab8bc83e53dbebfa666476f40ee322e13cac5e64b79e7/mypy-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:ff2933428516ab63f961644bc49bc4cbe42bbffb2cd3b71cc7277c07d16b1a8b", size = 9507307, upload-time = "2025-07-31T07:53:59.734Z" }, - { url = "https://files.pythonhosted.org/packages/17/a2/7034d0d61af8098ec47902108553122baa0f438df8a713be860f7407c9e6/mypy-1.17.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:69e83ea6553a3ba79c08c6e15dbd9bfa912ec1e493bf75489ef93beb65209aeb", size = 11086295, upload-time = "2025-07-31T07:53:28.124Z" }, - { url = "https://files.pythonhosted.org/packages/14/1f/19e7e44b594d4b12f6ba8064dbe136505cec813549ca3e5191e40b1d3cc2/mypy-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b16708a66d38abb1e6b5702f5c2c87e133289da36f6a1d15f6a5221085c6403", size = 10112355, upload-time = "2025-07-31T07:53:21.121Z" }, - { url = "https://files.pythonhosted.org/packages/5b/69/baa33927e29e6b4c55d798a9d44db5d394072eef2bdc18c3e2048c9ed1e9/mypy-1.17.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:89e972c0035e9e05823907ad5398c5a73b9f47a002b22359b177d40bdaee7056", size = 11875285, upload-time = "2025-07-31T07:53:55.293Z" }, - { url = "https://files.pythonhosted.org/packages/90/13/f3a89c76b0a41e19490b01e7069713a30949d9a6c147289ee1521bcea245/mypy-1.17.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03b6d0ed2b188e35ee6d5c36b5580cffd6da23319991c49ab5556c023ccf1341", size = 12737895, upload-time = "2025-07-31T07:53:43.623Z" }, - { url = "https://files.pythonhosted.org/packages/23/a1/c4ee79ac484241301564072e6476c5a5be2590bc2e7bfd28220033d2ef8f/mypy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c837b896b37cd103570d776bda106eabb8737aa6dd4f248451aecf53030cdbeb", size = 12931025, upload-time = "2025-07-31T07:54:17.125Z" }, - { url = "https://files.pythonhosted.org/packages/89/b8/7409477be7919a0608900e6320b155c72caab4fef46427c5cc75f85edadd/mypy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:665afab0963a4b39dff7c1fa563cc8b11ecff7910206db4b2e64dd1ba25aed19", size = 9584664, upload-time = "2025-07-31T07:54:12.842Z" }, - { url = "https://files.pythonhosted.org/packages/5b/82/aec2fc9b9b149f372850291827537a508d6c4d3664b1750a324b91f71355/mypy-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93378d3203a5c0800c6b6d850ad2f19f7a3cdf1a3701d3416dbf128805c6a6a7", size = 11075338, upload-time = "2025-07-31T07:53:38.873Z" }, - { url = "https://files.pythonhosted.org/packages/07/ac/ee93fbde9d2242657128af8c86f5d917cd2887584cf948a8e3663d0cd737/mypy-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:15d54056f7fe7a826d897789f53dd6377ec2ea8ba6f776dc83c2902b899fee81", size = 10113066, upload-time = "2025-07-31T07:54:14.707Z" }, - { url = 
"https://files.pythonhosted.org/packages/5a/68/946a1e0be93f17f7caa56c45844ec691ca153ee8b62f21eddda336a2d203/mypy-1.17.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:209a58fed9987eccc20f2ca94afe7257a8f46eb5df1fb69958650973230f91e6", size = 11875473, upload-time = "2025-07-31T07:53:14.504Z" }, - { url = "https://files.pythonhosted.org/packages/9f/0f/478b4dce1cb4f43cf0f0d00fba3030b21ca04a01b74d1cd272a528cf446f/mypy-1.17.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:099b9a5da47de9e2cb5165e581f158e854d9e19d2e96b6698c0d64de911dd849", size = 12744296, upload-time = "2025-07-31T07:53:03.896Z" }, - { url = "https://files.pythonhosted.org/packages/ca/70/afa5850176379d1b303f992a828de95fc14487429a7139a4e0bdd17a8279/mypy-1.17.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ffadfbe6994d724c5a1bb6123a7d27dd68fc9c059561cd33b664a79578e14", size = 12914657, upload-time = "2025-07-31T07:54:08.576Z" }, - { url = "https://files.pythonhosted.org/packages/53/f9/4a83e1c856a3d9c8f6edaa4749a4864ee98486e9b9dbfbc93842891029c2/mypy-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:9a2b7d9180aed171f033c9f2fc6c204c1245cf60b0cb61cf2e7acc24eea78e0a", size = 9593320, upload-time = "2025-07-31T07:53:01.341Z" }, - { url = "https://files.pythonhosted.org/packages/38/56/79c2fac86da57c7d8c48622a05873eaab40b905096c33597462713f5af90/mypy-1.17.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:15a83369400454c41ed3a118e0cc58bd8123921a602f385cb6d6ea5df050c733", size = 11040037, upload-time = "2025-07-31T07:54:10.942Z" }, - { url = "https://files.pythonhosted.org/packages/4d/c3/adabe6ff53638e3cad19e3547268482408323b1e68bf082c9119000cd049/mypy-1.17.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:55b918670f692fc9fba55c3298d8a3beae295c5cded0a55dccdc5bbead814acd", size = 10131550, upload-time = "2025-07-31T07:53:41.307Z" }, - { url = "https://files.pythonhosted.org/packages/b8/c5/2e234c22c3bdeb23a7817af57a58865a39753bde52c74e2c661ee0cfc640/mypy-1.17.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:62761474061feef6f720149d7ba876122007ddc64adff5ba6f374fda35a018a0", size = 11872963, upload-time = "2025-07-31T07:53:16.878Z" }, - { url = "https://files.pythonhosted.org/packages/ab/26/c13c130f35ca8caa5f2ceab68a247775648fdcd6c9a18f158825f2bc2410/mypy-1.17.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c49562d3d908fd49ed0938e5423daed8d407774a479b595b143a3d7f87cdae6a", size = 12710189, upload-time = "2025-07-31T07:54:01.962Z" }, - { url = "https://files.pythonhosted.org/packages/82/df/c7d79d09f6de8383fe800521d066d877e54d30b4fb94281c262be2df84ef/mypy-1.17.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:397fba5d7616a5bc60b45c7ed204717eaddc38f826e3645402c426057ead9a91", size = 12900322, upload-time = "2025-07-31T07:53:10.551Z" }, - { url = "https://files.pythonhosted.org/packages/b8/98/3d5a48978b4f708c55ae832619addc66d677f6dc59f3ebad71bae8285ca6/mypy-1.17.1-cp314-cp314-win_amd64.whl", hash = "sha256:9d6b20b97d373f41617bd0708fd46aa656059af57f2ef72aa8c7d6a2b73b74ed", size = 9751879, upload-time = "2025-07-31T07:52:56.683Z" }, - { url = "https://files.pythonhosted.org/packages/29/cb/673e3d34e5d8de60b3a61f44f80150a738bff568cd6b7efb55742a605e98/mypy-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5d1092694f166a7e56c805caaf794e0585cabdbf1df36911c414e4e9abb62ae9", size = 10992466, upload-time = 
"2025-07-31T07:53:57.574Z" }, - { url = "https://files.pythonhosted.org/packages/0c/d0/fe1895836eea3a33ab801561987a10569df92f2d3d4715abf2cfeaa29cb2/mypy-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79d44f9bfb004941ebb0abe8eff6504223a9c1ac51ef967d1263c6572bbebc99", size = 10117638, upload-time = "2025-07-31T07:53:34.256Z" }, - { url = "https://files.pythonhosted.org/packages/97/f3/514aa5532303aafb95b9ca400a31054a2bd9489de166558c2baaeea9c522/mypy-1.17.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b01586eed696ec905e61bd2568f48740f7ac4a45b3a468e6423a03d3788a51a8", size = 11915673, upload-time = "2025-07-31T07:52:59.361Z" }, - { url = "https://files.pythonhosted.org/packages/ab/c3/c0805f0edec96fe8e2c048b03769a6291523d509be8ee7f56ae922fa3882/mypy-1.17.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43808d9476c36b927fbcd0b0255ce75efe1b68a080154a38ae68a7e62de8f0f8", size = 12649022, upload-time = "2025-07-31T07:53:45.92Z" }, - { url = "https://files.pythonhosted.org/packages/45/3e/d646b5a298ada21a8512fa7e5531f664535a495efa672601702398cea2b4/mypy-1.17.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:feb8cc32d319edd5859da2cc084493b3e2ce5e49a946377663cc90f6c15fb259", size = 12895536, upload-time = "2025-07-31T07:53:06.17Z" }, - { url = "https://files.pythonhosted.org/packages/14/55/e13d0dcd276975927d1f4e9e2ec4fd409e199f01bdc671717e673cc63a22/mypy-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d7598cf74c3e16539d4e2f0b8d8c318e00041553d83d4861f87c7a72e95ac24d", size = 9512564, upload-time = "2025-07-31T07:53:12.346Z" }, - { url = "https://files.pythonhosted.org/packages/1d/f3/8fcd2af0f5b806f6cf463efaffd3c9548a28f84220493ecd38d127b6b66d/mypy-1.17.1-py3-none-any.whl", hash = "sha256:a9f52c0351c21fe24c21d8c0eb1f62967b262d6729393397b6f443c3b773c3b9", size = 2283411, upload-time = "2025-07-31T07:53:24.664Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/14/a3/931e09fc02d7ba96da65266884da4e4a8806adcdb8a57faaacc6edf1d538/mypy-1.18.1.tar.gz", hash = "sha256:9e988c64ad3ac5987f43f5154f884747faf62141b7f842e87465b45299eea5a9", size = 3448447, upload-time = "2025-09-11T23:00:47.067Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/06/29ea5a34c23938ae93bc0040eb2900eb3f0f2ef4448cc59af37ab3ddae73/mypy-1.18.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2761b6ae22a2b7d8e8607fb9b81ae90bc2e95ec033fd18fa35e807af6c657763", size = 12811535, upload-time = "2025-09-11T22:58:55.399Z" }, + { url = "https://files.pythonhosted.org/packages/a8/40/04c38cb04fa9f1dc224b3e9634021a92c47b1569f1c87dfe6e63168883bb/mypy-1.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5b10e3ea7f2eec23b4929a3fabf84505da21034a4f4b9613cda81217e92b74f3", size = 11897559, upload-time = "2025-09-11T22:59:48.041Z" }, + { url = "https://files.pythonhosted.org/packages/46/bf/4c535bd45ea86cebbc1a3b6a781d442f53a4883f322ebd2d442db6444d0b/mypy-1.18.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:261fbfced030228bc0f724d5d92f9ae69f46373bdfd0e04a533852677a11dbea", size = 12507430, upload-time = "2025-09-11T22:59:30.415Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e1/cbefb16f2be078d09e28e0b9844e981afb41f6ffc85beb68b86c6976e641/mypy-1.18.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4dc6b34a1c6875e6286e27d836a35c0d04e8316beac4482d42cfea7ed2527df8", size = 13243717, upload-time = 
"2025-09-11T22:59:11.297Z" }, + { url = "https://files.pythonhosted.org/packages/65/e8/3e963da63176f16ca9caea7fa48f1bc8766de317cd961528c0391565fd47/mypy-1.18.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1cabb353194d2942522546501c0ff75c4043bf3b63069cb43274491b44b773c9", size = 13492052, upload-time = "2025-09-11T23:00:09.29Z" }, + { url = "https://files.pythonhosted.org/packages/4b/09/d5d70c252a3b5b7530662d145437bd1de15f39fa0b48a27ee4e57d254aa1/mypy-1.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:738b171690c8e47c93569635ee8ec633d2cdb06062f510b853b5f233020569a9", size = 9765846, upload-time = "2025-09-11T22:58:26.198Z" }, + { url = "https://files.pythonhosted.org/packages/32/28/47709d5d9e7068b26c0d5189c8137c8783e81065ad1102b505214a08b548/mypy-1.18.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6c903857b3e28fc5489e54042684a9509039ea0aedb2a619469438b544ae1961", size = 12734635, upload-time = "2025-09-11T23:00:24.983Z" }, + { url = "https://files.pythonhosted.org/packages/7c/12/ee5c243e52497d0e59316854041cf3b3130131b92266d0764aca4dec3c00/mypy-1.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2a0c8392c19934c2b6c65566d3a6abdc6b51d5da7f5d04e43f0eb627d6eeee65", size = 11817287, upload-time = "2025-09-11T22:59:07.38Z" }, + { url = "https://files.pythonhosted.org/packages/48/bd/2aeb950151005fe708ab59725afed7c4aeeb96daf844f86a05d4b8ac34f8/mypy-1.18.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f85eb7efa2ec73ef63fc23b8af89c2fe5bf2a4ad985ed2d3ff28c1bb3c317c92", size = 12430464, upload-time = "2025-09-11T22:58:48.084Z" }, + { url = "https://files.pythonhosted.org/packages/71/e8/7a20407aafb488acb5734ad7fb5e8c2ef78d292ca2674335350fa8ebef67/mypy-1.18.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:82ace21edf7ba8af31c3308a61dc72df30500f4dbb26f99ac36b4b80809d7e94", size = 13164555, upload-time = "2025-09-11T23:00:13.803Z" }, + { url = "https://files.pythonhosted.org/packages/e8/c9/5f39065252e033b60f397096f538fb57c1d9fd70a7a490f314df20dd9d64/mypy-1.18.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a2dfd53dfe632f1ef5d161150a4b1f2d0786746ae02950eb3ac108964ee2975a", size = 13359222, upload-time = "2025-09-11T23:00:33.469Z" }, + { url = "https://files.pythonhosted.org/packages/85/b6/d54111ef3c1e55992cd2ec9b8b6ce9c72a407423e93132cae209f7e7ba60/mypy-1.18.1-cp311-cp311-win_amd64.whl", hash = "sha256:320f0ad4205eefcb0e1a72428dde0ad10be73da9f92e793c36228e8ebf7298c0", size = 9760441, upload-time = "2025-09-11T23:00:44.826Z" }, + { url = "https://files.pythonhosted.org/packages/e7/14/1c3f54d606cb88a55d1567153ef3a8bc7b74702f2ff5eb64d0994f9e49cb/mypy-1.18.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:502cde8896be8e638588b90fdcb4c5d5b8c1b004dfc63fd5604a973547367bb9", size = 12911082, upload-time = "2025-09-11T23:00:41.465Z" }, + { url = "https://files.pythonhosted.org/packages/90/83/235606c8b6d50a8eba99773add907ce1d41c068edb523f81eb0d01603a83/mypy-1.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7509549b5e41be279afc1228242d0e397f1af2919a8f2877ad542b199dc4083e", size = 11919107, upload-time = "2025-09-11T22:58:40.903Z" }, + { url = "https://files.pythonhosted.org/packages/ca/25/4e2ce00f8d15b99d0c68a2536ad63e9eac033f723439ef80290ec32c1ff5/mypy-1.18.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5956ecaabb3a245e3f34100172abca1507be687377fe20e24d6a7557e07080e2", size = 12472551, upload-time = 
"2025-09-11T22:58:37.272Z" }, + { url = "https://files.pythonhosted.org/packages/32/bb/92642a9350fc339dd9dcefcf6862d171b52294af107d521dce075f32f298/mypy-1.18.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8750ceb014a96c9890421c83f0db53b0f3b8633e2864c6f9bc0a8e93951ed18d", size = 13340554, upload-time = "2025-09-11T22:59:38.756Z" }, + { url = "https://files.pythonhosted.org/packages/cd/ee/38d01db91c198fb6350025d28f9719ecf3c8f2c55a0094bfbf3ef478cc9a/mypy-1.18.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fb89ea08ff41adf59476b235293679a6eb53a7b9400f6256272fb6029bec3ce5", size = 13530933, upload-time = "2025-09-11T22:59:20.228Z" }, + { url = "https://files.pythonhosted.org/packages/da/8d/6d991ae631f80d58edbf9d7066e3f2a96e479dca955d9a968cd6e90850a3/mypy-1.18.1-cp312-cp312-win_amd64.whl", hash = "sha256:2657654d82fcd2a87e02a33e0d23001789a554059bbf34702d623dafe353eabf", size = 9828426, upload-time = "2025-09-11T23:00:21.007Z" }, + { url = "https://files.pythonhosted.org/packages/e4/ec/ef4a7260e1460a3071628a9277a7579e7da1b071bc134ebe909323f2fbc7/mypy-1.18.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d70d2b5baf9b9a20bc9c730015615ae3243ef47fb4a58ad7b31c3e0a59b5ef1f", size = 12918671, upload-time = "2025-09-11T22:58:29.814Z" }, + { url = "https://files.pythonhosted.org/packages/a1/82/0ea6c3953f16223f0b8eda40c1aeac6bd266d15f4902556ae6e91f6fca4c/mypy-1.18.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b8367e33506300f07a43012fc546402f283c3f8bcff1dc338636affb710154ce", size = 11913023, upload-time = "2025-09-11T23:00:29.049Z" }, + { url = "https://files.pythonhosted.org/packages/ae/ef/5e2057e692c2690fc27b3ed0a4dbde4388330c32e2576a23f0302bc8358d/mypy-1.18.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:913f668ec50c3337b89df22f973c1c8f0b29ee9e290a8b7fe01cc1ef7446d42e", size = 12473355, upload-time = "2025-09-11T23:00:04.544Z" }, + { url = "https://files.pythonhosted.org/packages/98/43/b7e429fc4be10e390a167b0cd1810d41cb4e4add4ae50bab96faff695a3b/mypy-1.18.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a0e70b87eb27b33209fa4792b051c6947976f6ab829daa83819df5f58330c71", size = 13346944, upload-time = "2025-09-11T22:58:23.024Z" }, + { url = "https://files.pythonhosted.org/packages/89/4e/899dba0bfe36bbd5b7c52e597de4cf47b5053d337b6d201a30e3798e77a6/mypy-1.18.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c378d946e8a60be6b6ede48c878d145546fb42aad61df998c056ec151bf6c746", size = 13512574, upload-time = "2025-09-11T22:59:52.152Z" }, + { url = "https://files.pythonhosted.org/packages/f5/f8/7661021a5b0e501b76440454d786b0f01bb05d5c4b125fcbda02023d0250/mypy-1.18.1-cp313-cp313-win_amd64.whl", hash = "sha256:2cd2c1e0f3a7465f22731987fff6fc427e3dcbb4ca5f7db5bbeaff2ff9a31f6d", size = 9837684, upload-time = "2025-09-11T22:58:44.454Z" }, + { url = "https://files.pythonhosted.org/packages/bf/87/7b173981466219eccc64c107cf8e5ab9eb39cc304b4c07df8e7881533e4f/mypy-1.18.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ba24603c58e34dd5b096dfad792d87b304fc6470cbb1c22fd64e7ebd17edcc61", size = 12900265, upload-time = "2025-09-11T22:59:03.4Z" }, + { url = "https://files.pythonhosted.org/packages/ae/cc/b10e65bae75b18a5ac8f81b1e8e5867677e418f0dd2c83b8e2de9ba96ebd/mypy-1.18.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ed36662fb92ae4cb3cacc682ec6656208f323bbc23d4b08d091eecfc0863d4b5", size = 11942890, upload-time = 
"2025-09-11T23:00:00.607Z" }, + { url = "https://files.pythonhosted.org/packages/39/d4/aeefa07c44d09f4c2102e525e2031bc066d12e5351f66b8a83719671004d/mypy-1.18.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:040ecc95e026f71a9ad7956fea2724466602b561e6a25c2e5584160d3833aaa8", size = 12472291, upload-time = "2025-09-11T22:59:43.425Z" }, + { url = "https://files.pythonhosted.org/packages/c6/07/711e78668ff8e365f8c19735594ea95938bff3639a4c46a905e3ed8ff2d6/mypy-1.18.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:937e3ed86cb731276706e46e03512547e43c391a13f363e08d0fee49a7c38a0d", size = 13318610, upload-time = "2025-09-11T23:00:17.604Z" }, + { url = "https://files.pythonhosted.org/packages/ca/85/df3b2d39339c31d360ce299b418c55e8194ef3205284739b64962f6074e7/mypy-1.18.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1f95cc4f01c0f1701ca3b0355792bccec13ecb2ec1c469e5b85a6ef398398b1d", size = 13513697, upload-time = "2025-09-11T22:58:59.534Z" }, + { url = "https://files.pythonhosted.org/packages/b1/df/462866163c99ea73bb28f0eb4d415c087e30de5d36ee0f5429d42e28689b/mypy-1.18.1-cp314-cp314-win_amd64.whl", hash = "sha256:e4f16c0019d48941220ac60b893615be2f63afedaba6a0801bdcd041b96991ce", size = 9985739, upload-time = "2025-09-11T22:58:51.644Z" }, + { url = "https://files.pythonhosted.org/packages/64/1a/9005d78ffedaac58b3ee3a44d53a65b09ac1d27c36a00ade849015b8e014/mypy-1.18.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e37763af63a8018308859bc83d9063c501a5820ec5bd4a19f0a2ac0d1c25c061", size = 12809347, upload-time = "2025-09-11T22:59:15.468Z" }, + { url = "https://files.pythonhosted.org/packages/46/b3/c932216b281f7c223a2c8b98b9c8e1eb5bea1650c11317ac778cfc3778e4/mypy-1.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:51531b6e94f34b8bd8b01dee52bbcee80daeac45e69ec5c36e25bce51cbc46e6", size = 11899906, upload-time = "2025-09-11T22:59:56.473Z" }, + { url = "https://files.pythonhosted.org/packages/30/6b/542daf553f97275677c35d183404d1d83b64cea315f452195c5a5782a225/mypy-1.18.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dbfdea20e90e9c5476cea80cfd264d8e197c6ef2c58483931db2eefb2f7adc14", size = 12504415, upload-time = "2025-09-11T23:00:37.332Z" }, + { url = "https://files.pythonhosted.org/packages/37/d3/061d0d861377ea3fdb03784d11260bfa2adbb4eeeb24b63bd1eea7b6080c/mypy-1.18.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:99f272c9b59f5826fffa439575716276d19cbf9654abc84a2ba2d77090a0ba14", size = 13243466, upload-time = "2025-09-11T22:58:18.562Z" }, + { url = "https://files.pythonhosted.org/packages/7d/5e/6e88a79bdfec8d01ba374c391150c94f6c74545bdc37bdc490a7f30c5095/mypy-1.18.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8c05a7f8c00300a52f3a4fcc95a185e99bf944d7e851ff141bae8dcf6dcfeac4", size = 13493539, upload-time = "2025-09-11T22:59:24.479Z" }, + { url = "https://files.pythonhosted.org/packages/92/5a/a14a82e44ed76998d73a070723b6584963fdb62f597d373c8b22c3a3da3d/mypy-1.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:2fbcecbe5cf213ba294aa8c0b8c104400bf7bb64db82fb34fe32a205da4b3531", size = 9764809, upload-time = "2025-09-11T22:58:33.133Z" }, + { url = "https://files.pythonhosted.org/packages/e0/1d/4b97d3089b48ef3d904c9ca69fab044475bd03245d878f5f0b3ea1daf7ce/mypy-1.18.1-py3-none-any.whl", hash = "sha256:b76a4de66a0ac01da1be14ecc8ae88ddea33b8380284a9e3eae39d57ebcbe26e", size = 2352212, upload-time = 
"2025-09-11T22:59:26.576Z" }, ] [[package]] @@ -3349,18 +3353,18 @@ wheels = [ [[package]] name = "protobuf" -version = "6.32.0" +version = "6.32.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c0/df/fb4a8eeea482eca989b51cffd274aac2ee24e825f0bf3cbce5281fa1567b/protobuf-6.32.0.tar.gz", hash = "sha256:a81439049127067fc49ec1d36e25c6ee1d1a2b7be930675f919258d03c04e7d2", size = 440614, upload-time = "2025-08-14T21:21:25.015Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fa/a4/cc17347aa2897568beece2e674674359f911d6fe21b0b8d6268cd42727ac/protobuf-6.32.1.tar.gz", hash = "sha256:ee2469e4a021474ab9baafea6cd070e5bf27c7d29433504ddea1a4ee5850f68d", size = 440635, upload-time = "2025-09-11T21:38:42.935Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/33/18/df8c87da2e47f4f1dcc5153a81cd6bca4e429803f4069a299e236e4dd510/protobuf-6.32.0-cp310-abi3-win32.whl", hash = "sha256:84f9e3c1ff6fb0308dbacb0950d8aa90694b0d0ee68e75719cb044b7078fe741", size = 424409, upload-time = "2025-08-14T21:21:12.366Z" }, - { url = "https://files.pythonhosted.org/packages/e1/59/0a820b7310f8139bd8d5a9388e6a38e1786d179d6f33998448609296c229/protobuf-6.32.0-cp310-abi3-win_amd64.whl", hash = "sha256:a8bdbb2f009cfc22a36d031f22a625a38b615b5e19e558a7b756b3279723e68e", size = 435735, upload-time = "2025-08-14T21:21:15.046Z" }, - { url = "https://files.pythonhosted.org/packages/cc/5b/0d421533c59c789e9c9894683efac582c06246bf24bb26b753b149bd88e4/protobuf-6.32.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d52691e5bee6c860fff9a1c86ad26a13afbeb4b168cd4445c922b7e2cf85aaf0", size = 426449, upload-time = "2025-08-14T21:21:16.687Z" }, - { url = "https://files.pythonhosted.org/packages/ec/7b/607764ebe6c7a23dcee06e054fd1de3d5841b7648a90fd6def9a3bb58c5e/protobuf-6.32.0-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:501fe6372fd1c8ea2a30b4d9be8f87955a64d6be9c88a973996cef5ef6f0abf1", size = 322869, upload-time = "2025-08-14T21:21:18.282Z" }, - { url = "https://files.pythonhosted.org/packages/40/01/2e730bd1c25392fc32e3268e02446f0d77cb51a2c3a8486b1798e34d5805/protobuf-6.32.0-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:75a2aab2bd1aeb1f5dc7c5f33bcb11d82ea8c055c9becbb41c26a8c43fd7092c", size = 322009, upload-time = "2025-08-14T21:21:19.893Z" }, - { url = "https://files.pythonhosted.org/packages/84/9c/244509764dc78d69e4a72bfe81b00f2691bdfcaffdb591a3e158695096d7/protobuf-6.32.0-cp39-cp39-win32.whl", hash = "sha256:7db8ed09024f115ac877a1427557b838705359f047b2ff2f2b2364892d19dacb", size = 424503, upload-time = "2025-08-14T21:21:21.328Z" }, - { url = "https://files.pythonhosted.org/packages/9b/6f/b1d90a22f619808cf6337aede0d6730af1849330f8dc4d434cfc4a8831b4/protobuf-6.32.0-cp39-cp39-win_amd64.whl", hash = "sha256:15eba1b86f193a407607112ceb9ea0ba9569aed24f93333fe9a497cf2fda37d3", size = 435822, upload-time = "2025-08-14T21:21:22.495Z" }, - { url = "https://files.pythonhosted.org/packages/9c/f2/80ffc4677aac1bc3519b26bc7f7f5de7fce0ee2f7e36e59e27d8beb32dd1/protobuf-6.32.0-py3-none-any.whl", hash = "sha256:ba377e5b67b908c8f3072a57b63e2c6a4cbd18aea4ed98d2584350dbf46f2783", size = 169287, upload-time = "2025-08-14T21:21:23.515Z" }, + { url = "https://files.pythonhosted.org/packages/c0/98/645183ea03ab3995d29086b8bf4f7562ebd3d10c9a4b14ee3f20d47cfe50/protobuf-6.32.1-cp310-abi3-win32.whl", hash = "sha256:a8a32a84bc9f2aad712041b8b366190f71dde248926da517bde9e832e4412085", size = 424411, upload-time = "2025-09-11T21:38:27.427Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/f3/6f58f841f6ebafe076cebeae33fc336e900619d34b1c93e4b5c97a81fdfa/protobuf-6.32.1-cp310-abi3-win_amd64.whl", hash = "sha256:b00a7d8c25fa471f16bc8153d0e53d6c9e827f0953f3c09aaa4331c718cae5e1", size = 435738, upload-time = "2025-09-11T21:38:30.959Z" }, + { url = "https://files.pythonhosted.org/packages/10/56/a8a3f4e7190837139e68c7002ec749190a163af3e330f65d90309145a210/protobuf-6.32.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d8c7e6eb619ffdf105ee4ab76af5a68b60a9d0f66da3ea12d1640e6d8dab7281", size = 426454, upload-time = "2025-09-11T21:38:34.076Z" }, + { url = "https://files.pythonhosted.org/packages/3f/be/8dd0a927c559b37d7a6c8ab79034fd167dcc1f851595f2e641ad62be8643/protobuf-6.32.1-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:2f5b80a49e1eb7b86d85fcd23fe92df154b9730a725c3b38c4e43b9d77018bf4", size = 322874, upload-time = "2025-09-11T21:38:35.509Z" }, + { url = "https://files.pythonhosted.org/packages/5c/f6/88d77011b605ef979aace37b7703e4eefad066f7e84d935e5a696515c2dd/protobuf-6.32.1-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:b1864818300c297265c83a4982fd3169f97122c299f56a56e2445c3698d34710", size = 322013, upload-time = "2025-09-11T21:38:37.017Z" }, + { url = "https://files.pythonhosted.org/packages/05/9d/d6f1a8b6657296920c58f6b85f7bca55fa27e3ca7fc5914604d89cd0250b/protobuf-6.32.1-cp39-cp39-win32.whl", hash = "sha256:68ff170bac18c8178f130d1ccb94700cf72852298e016a2443bdb9502279e5f1", size = 424505, upload-time = "2025-09-11T21:38:38.415Z" }, + { url = "https://files.pythonhosted.org/packages/ed/cd/891bd2d23558f52392a5687b2406a741e2e28d629524c88aade457029acd/protobuf-6.32.1-cp39-cp39-win_amd64.whl", hash = "sha256:d0975d0b2f3e6957111aa3935d08a0eb7e006b1505d825f862a1fffc8348e122", size = 435825, upload-time = "2025-09-11T21:38:39.773Z" }, + { url = "https://files.pythonhosted.org/packages/97/b7/15cc7d93443d6c6a84626ae3258a91f4c6ac8c0edd5df35ea7658f71b79c/protobuf-6.32.1-py3-none-any.whl", hash = "sha256:2601b779fc7d32a866c6b4404f9d42a3f67c5b9f3f15b4db3cccabe06b95c346", size = 169289, upload-time = "2025-09-11T21:38:41.234Z" }, ] [[package]] @@ -3704,7 +3708,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.7" +version = "2.11.9" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -3712,9 +3716,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = 
"sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, ] [[package]] @@ -3990,16 +3994,16 @@ wheels = [ [[package]] name = "pytest-asyncio" -version = "1.1.0" +version = "1.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "backports-asyncio-runner", marker = "python_full_version < '3.11'" }, { name = "pytest" }, - { name = "typing-extensions", marker = "python_full_version < '3.10'" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4e/51/f8794af39eeb870e87a8c8068642fc07bce0c854d6865d7dd0f2a9d338c2/pytest_asyncio-1.1.0.tar.gz", hash = "sha256:796aa822981e01b68c12e4827b8697108f7205020f24b5793b3c41555dab68ea", size = 46652, upload-time = "2025-07-16T04:29:26.393Z" } +sdist = { url = "https://files.pythonhosted.org/packages/42/86/9e3c5f48f7b7b638b216e4b9e645f54d199d7abbbab7a64a13b4e12ba10f/pytest_asyncio-1.2.0.tar.gz", hash = "sha256:c609a64a2a8768462d0c99811ddb8bd2583c33fd33cf7f21af1c142e824ffb57", size = 50119, upload-time = "2025-09-12T07:33:53.816Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/9d/bf86eddabf8c6c9cb1ea9a869d6873b46f105a5d292d3a6f7071f5b07935/pytest_asyncio-1.1.0-py3-none-any.whl", hash = "sha256:5fe2d69607b0bd75c656d1211f969cadba035030156745ee09e7d71740e58ecf", size = 15157, upload-time = "2025-07-16T04:29:24.929Z" }, + { url = "https://files.pythonhosted.org/packages/04/93/2fa34714b7a4ae72f2f8dad66ba17dd9a2c793220719e736dda28b7aec27/pytest_asyncio-1.2.0-py3-none-any.whl", hash = "sha256:8e17ae5e46d8e7efe51ab6494dd2010f4ca8dae51652aa3c8d55acf50bfb2e99", size = 15095, upload-time = "2025-09-12T07:33:52.639Z" }, ] [[package]] @@ -4971,11 +4975,11 @@ asyncio = [ [[package]] name = "sqlglot" -version = "27.13.2" +version = "27.14.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/71/66/d323d56ffc98a50e17e172612f2f244e7a06b54d959c2d5acb14d83a8ab1/sqlglot-27.13.2.tar.gz", hash = "sha256:81a3b46be3a6700c23f72f74fc06059289d11468aed59242687ba1600ebcef52", size = 5457480, upload-time = "2025-09-08T23:21:00.335Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3e/1a/ce57767e3b791c670aa395c92c42c5b5fe11f12c2504a656f8463862ba98/sqlglot-27.14.0.tar.gz", hash = "sha256:456c82ec95dd05927cfe37cb57d4540acbfec6f0743f8c8f246147d56549ba88", size = 5462946, upload-time = "2025-09-11T21:05:59.916Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/03/e6/79ff6a637f30a96e2fb73955e74bee42402f7bf137556f32d6405c21820c/sqlglot-27.13.2-py3-none-any.whl", hash = "sha256:88bb687ed3233a2ada1cd49483ad93a80c44ddc7e0164c1709f2cf7e32465911", size = 512483, upload-time = "2025-09-08T23:20:57.794Z" }, + { url = "https://files.pythonhosted.org/packages/c6/be/fbd6905dc14e0cd118a21cd48ff39a60407f7059801cd1afc1913d9e86da/sqlglot-27.14.0-py3-none-any.whl", hash = "sha256:a5adc68abc85ccd249258ae0f3aff3c1869bb5b086e360375e16518858ce8a7a", size = 515883, upload-time = "2025-09-11T21:05:57.349Z" }, ] [package.optional-dependencies] @@ -5569,14 +5573,14 @@ wheels = [ [[package]] name = "types-cffi" -version = "1.17.0.20250822" +version = "1.17.0.20250914" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-setuptools" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/da/0c/76a48cb6e742cac4d61a4ec632dd30635b6d302f5acdc2c0a27572ac7ae3/types_cffi-1.17.0.20250822.tar.gz", hash = 
"sha256:bf6f5a381ea49da7ff895fae69711271e6192c434470ce6139bf2b2e0d0fa08d", size = 17130, upload-time = "2025-08-22T03:04:02.445Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c0/2c/72ee071568b2777a49f6a33bc56591df903449acdf93486bd89427d7893b/types_cffi-1.17.0.20250914.tar.gz", hash = "sha256:b0d051a5d7cfc22a5195f2167e0130ee4755baa6f045f4efa2bf3dc12916679f", size = 17157, upload-time = "2025-09-14T02:56:37.768Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/21/f7/68029931e7539e3246b33386a19c475f234c71d2a878411847b20bb31960/types_cffi-1.17.0.20250822-py3-none-any.whl", hash = "sha256:183dd76c1871a48936d7b931488e41f0f25a7463abe10b5816be275fc11506d5", size = 20083, upload-time = "2025-08-22T03:04:01.466Z" }, + { url = "https://files.pythonhosted.org/packages/c7/43/1b676c3a6f3a81ed4b34c5f0148320f3029d3dc85f91f3f5aca7cc3136f4/types_cffi-1.17.0.20250914-py3-none-any.whl", hash = "sha256:f563d7ac8faa664be9e828222bea5cc9cdda9e3fcdc9f077434c5a1fda9c802a", size = 20094, upload-time = "2025-09-14T02:56:36.835Z" }, ] [[package]] @@ -5590,20 +5594,20 @@ wheels = [ [[package]] name = "types-docutils" -version = "0.22.0.20250822" +version = "0.22.0.20250914" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b4/e3/b28d7786f4a5170095f59846d492c2980656c30ef4405ae94156ff63151c/types_docutils-0.22.0.20250822.tar.gz", hash = "sha256:40efebeef8467ae7648a33f3fa6f778bd94d338ca1f4a1c924b206d2f687f60a", size = 56487, upload-time = "2025-08-22T03:03:07.576Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4d/72/48cad115dff86755d83bbb37eb70df2d26a1fb2d8b5e1725d6524e0f08a4/types_docutils-0.22.0.20250914.tar.gz", hash = "sha256:0c7f61c90ed2900fa5c8e6cd375222981be1e28240b8c8a67ca4a186e367618d", size = 56496, upload-time = "2025-09-14T02:56:04.766Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/02/4822bbddf4dae6b5dfe28d257c1e1f128c8315da8709e6d1862e055c13f2/types_docutils-0.22.0.20250822-py3-none-any.whl", hash = "sha256:890d5986045b8a532b56e7f0d4979de3afc23b4543de40910ec8c71ec5f3ba99", size = 91786, upload-time = "2025-08-22T03:03:06.522Z" }, + { url = "https://files.pythonhosted.org/packages/ec/7b/ddf2a291e5145d8abe3bf8e264b232b8bd9c6865121257dfd43079ce9b6d/types_docutils-0.22.0.20250914-py3-none-any.whl", hash = "sha256:f1eec1a6024feef6560688fd9525ff888b95866cecb685e0a68bd095e817b00a", size = 91784, upload-time = "2025-09-14T02:56:03.449Z" }, ] [[package]] name = "types-protobuf" -version = "6.30.2.20250822" +version = "6.30.2.20250914" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/61/68/0c7144be5c6dc16538e79458839fc914ea494481c7e64566de4ecc0c3682/types_protobuf-6.30.2.20250822.tar.gz", hash = "sha256:faacbbe87bd8cba4472361c0bd86f49296bd36f7761e25d8ada4f64767c1bde9", size = 62379, upload-time = "2025-08-22T03:01:56.572Z" } +sdist = { url = "https://files.pythonhosted.org/packages/36/d1/e12dad323fe6e2455b768828de288f60d5160f41dad5d31af8ef92a6acbb/types_protobuf-6.30.2.20250914.tar.gz", hash = "sha256:c2105326d0a52de3d33b84af0010d834ebbd4c17c50ff261fa82551ab75d9559", size = 62424, upload-time = "2025-09-14T02:56:00.798Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/52/64/b926a6355993f712d7828772e42b9ae942f2d306d25072329805c374e729/types_protobuf-6.30.2.20250822-py3-none-any.whl", hash = "sha256:5584c39f7e36104b5f8bdfd31815fa1d5b7b3455a79ddddc097b62320f4b1841", size = 76523, upload-time = "2025-08-22T03:01:55.157Z" }, + 
{ url = "https://files.pythonhosted.org/packages/6c/c4/3fcb1f8e03456a8a33a5dfb9f9788b0a91023e5fad6a37d46fc6831629a7/types_protobuf-6.30.2.20250914-py3-none-any.whl", hash = "sha256:cfc24977c0f38cf2896d918a59faed7650eb983be6070343a6204ac8ac0a297e", size = 76546, upload-time = "2025-09-14T02:55:59.489Z" }, ] [[package]]