diff --git a/pyproject.toml b/pyproject.toml
index 44dfc763..3d57aa9e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -13,7 +13,7 @@ maintainers = [{ name = "Litestar Developers", email = "hello@litestar.dev" }]
 name = "sqlspec"
 readme = "README.md"
 requires-python = ">=3.9, <4.0"
-version = "0.16.2"
+version = "0.16.3"
 
 [project.urls]
 Discord = "https://discord.gg/litestar"
diff --git a/sqlspec/base.py b/sqlspec/base.py
index 21cb0cca..ef19f671 100644
--- a/sqlspec/base.py
+++ b/sqlspec/base.py
@@ -26,7 +26,10 @@ if TYPE_CHECKING:
     from contextlib import AbstractAsyncContextManager, AbstractContextManager
+    from pathlib import Path
 
+    from sqlspec.core.statement import SQL
+    from sqlspec.loader import SQLFileLoader
     from sqlspec.typing import ConnectionT, PoolT
 
 
@@ -38,13 +41,14 @@ class SQLSpec:
     """Configuration manager and registry for database connections and pools."""
 
-    __slots__ = ("_configs", "_instance_cache_config")
+    __slots__ = ("_configs", "_instance_cache_config", "_sql_loader")
 
-    def __init__(self) -> None:
+    def __init__(self, *, loader: "Optional[SQLFileLoader]" = None) -> None:
         self._configs: dict[Any, DatabaseConfigProtocol[Any, Any, Any]] = {}
         # Register sync cleanup only for sync resources
        atexit.register(self._cleanup_sync_pools)
         self._instance_cache_config: Optional[CacheConfig] = None
+        self._sql_loader: Optional[SQLFileLoader] = loader
 
     @staticmethod
     def _get_config_name(obj: Any) -> str:
@@ -591,3 +595,98 @@ def configure_cache(
                 else current_config.optimized_cache_enabled,
             )
         )
+
+    # SQL File Loading Integration
+
+    def _ensure_sql_loader(self) -> "SQLFileLoader":
+        """Ensure SQL loader is initialized lazily."""
+        if self._sql_loader is None:
+            # Import here to avoid circular imports
+            from sqlspec.loader import SQLFileLoader
+
+            self._sql_loader = SQLFileLoader()
+        return self._sql_loader
+
+    def load_sql_files(self, *paths: "Union[str, Path]") -> None:
+        """Load SQL files from paths or directories.
+
+        Args:
+            *paths: One or more file paths or directory paths to load.
+        """
+        loader = self._ensure_sql_loader()
+        loader.load_sql(*paths)
+        logger.debug("Loaded SQL files: %s", paths)
+
+    def add_named_sql(self, name: str, sql: str, dialect: "Optional[str]" = None) -> None:
+        """Add a named SQL query directly.
+
+        Args:
+            name: Name for the SQL query.
+            sql: Raw SQL content.
+            dialect: Optional dialect for the SQL statement.
+        """
+        loader = self._ensure_sql_loader()
+        loader.add_named_sql(name, sql, dialect)
+        logger.debug("Added named SQL: %s", name)
+
+    def get_sql(self, name: str) -> "SQL":
+        """Get a SQL object by name.
+
+        Args:
+            name: Name of the statement (from -- name: in SQL file).
+                Hyphens in names are converted to underscores.
+
+        Returns:
+            SQL object ready for execution.
+        """
+        loader = self._ensure_sql_loader()
+        return loader.get_sql(name)
+
+    def list_sql_queries(self) -> "list[str]":
+        """List all available query names.
+
+        Returns:
+            Sorted list of query names.
+        """
+        if self._sql_loader is None:
+            return []
+        return self._sql_loader.list_queries()
+
+    def has_sql_query(self, name: str) -> bool:
+        """Check if a SQL query exists.
+
+        Args:
+            name: Query name to check.
+
+        Returns:
+            True if query exists.
+        """
+        if self._sql_loader is None:
+            return False
+        return self._sql_loader.has_query(name)
+
+    def clear_sql_cache(self) -> None:
+        """Clear the SQL file cache."""
+        if self._sql_loader is not None:
+            self._sql_loader.clear_cache()
+            logger.debug("Cleared SQL cache")
+
+    def reload_sql_files(self) -> None:
+        """Reload all SQL files.
+
+        Note: This clears the cache and requires calling load_sql_files again.
+        """
+        if self._sql_loader is not None:
+            # Clear cache to force reload
+            self._sql_loader.clear_cache()
+            logger.debug("Cleared SQL cache for reload")
+
+    def get_sql_files(self) -> "list[str]":
+        """Get list of loaded SQL files.
+
+        Returns:
+            Sorted list of file paths.
+        """
+        if self._sql_loader is None:
+            return []
+        return self._sql_loader.list_files()
diff --git a/sqlspec/loader.py b/sqlspec/loader.py
index ba000a1e..a7df404d 100644
--- a/sqlspec/loader.py
+++ b/sqlspec/loader.py
@@ -14,8 +14,7 @@
 from typing import Any, Optional, Union
 
 from sqlspec.core.cache import CacheKey, get_cache_config, get_default_cache
-from sqlspec.core.parameters import ParameterStyleConfig, ParameterValidator
-from sqlspec.core.statement import SQL, StatementConfig
+from sqlspec.core.statement import SQL
 from sqlspec.exceptions import (
     MissingDependencyError,
     SQLFileNotFoundError,
@@ -34,7 +33,7 @@
 # Matches: -- name: query_name (supports hyphens and special suffixes)
 # We capture the name plus any trailing special characters
 QUERY_NAME_PATTERN = re.compile(r"^\s*--\s*name\s*:\s*([\w-]+[^\w\s]*)\s*$", re.MULTILINE | re.IGNORECASE)
-TRIM_SPECIAL_CHARS = re.compile(r"[^\w-]")
+TRIM_SPECIAL_CHARS = re.compile(r"[^\w.-]")
 
 # Matches: -- dialect: dialect_name (optional dialect specification)
 DIALECT_PATTERN = re.compile(r"^\s*--\s*dialect\s*:\s*(?P<dialect>[a-zA-Z0-9_]+)\s*$", re.IGNORECASE | re.MULTILINE)
@@ -581,8 +580,11 @@ def add_named_sql(self, name: str, sql: str, dialect: "Optional[str]" = None) ->
         Raises:
             ValueError: If query name already exists.
         """
-        if name in self._queries:
-            existing_source = self._query_to_file.get(name, "")
+        # Normalize the name for consistency with file-loaded queries
+        normalized_name = _normalize_query_name(name)
+
+        if normalized_name in self._queries:
+            existing_source = self._query_to_file.get(normalized_name, "")
             msg = f"Query name '{name}' already exists (source: {existing_source})"
             raise ValueError(msg)
@@ -599,21 +601,16 @@ def add_named_sql(self, name: str, sql: str, dialect: "Optional[str]" = None) ->
         else:
             dialect = normalized_dialect
 
-        statement = NamedStatement(name=name, sql=sql.strip(), dialect=dialect, start_line=0)
-        self._queries[name] = statement
-        self._query_to_file[name] = ""
+        statement = NamedStatement(name=normalized_name, sql=sql.strip(), dialect=dialect, start_line=0)
+        self._queries[normalized_name] = statement
+        self._query_to_file[normalized_name] = ""
 
-    def get_sql(
-        self, name: str, parameters: "Optional[Any]" = None, dialect: "Optional[str]" = None, **kwargs: "Any"
-    ) -> "SQL":
-        """Get a SQL object by statement name with dialect support.
+    def get_sql(self, name: str) -> "SQL":
+        """Get a SQL object by statement name.
 
         Args:
             name: Name of the statement (from -- name: in SQL file).
                 Hyphens in names are converted to underscores.
-            parameters: Parameters for the SQL statement.
-            dialect: Optional dialect override.
-            **kwargs: Additional parameters to pass to the SQL object.
 
         Returns:
             SQL object ready for execution.
@@ -640,46 +637,11 @@ def get_sql(
             raise SQLFileNotFoundError(name, path=f"Statement '{name}' not found. Available statements: {available}")
 
         parsed_statement = self._queries[safe_name]
-
-        effective_dialect = dialect or parsed_statement.dialect
-
-        if dialect is not None:
-            normalized_dialect = _normalize_dialect(dialect)
-            if normalized_dialect not in SUPPORTED_DIALECTS:
-                suggestions = _get_dialect_suggestions(normalized_dialect)
-                warning_msg = f"Unknown dialect '{dialect}'"
-                if suggestions:
-                    warning_msg += f". Did you mean: {', '.join(suggestions)}?"
-                warning_msg += f". Supported dialects: {', '.join(sorted(SUPPORTED_DIALECTS))}. Using dialect as-is."
-                logger.warning(warning_msg)
-                effective_dialect = dialect.lower()
-            else:
-                effective_dialect = normalized_dialect
-
-        sql_kwargs = dict(kwargs)
-        if parameters is not None:
-            sql_kwargs["parameters"] = parameters
 
         sqlglot_dialect = None
-        if effective_dialect:
-            sqlglot_dialect = _normalize_dialect_for_sqlglot(effective_dialect)
-
-        if not effective_dialect and "statement_config" not in sql_kwargs:
-            validator = ParameterValidator()
-            param_info = validator.extract_parameters(parsed_statement.sql)
-            if param_info:
-                styles = {p.style for p in param_info}
-                if styles:
-                    detected_style = next(iter(styles))
-                    sql_kwargs["statement_config"] = StatementConfig(
-                        parameter_config=ParameterStyleConfig(
-                            default_parameter_style=detected_style,
-                            supported_parameter_styles=styles,
-                            preserve_parameter_format=True,
-                        )
-                    )
+        if parsed_statement.dialect:
+            sqlglot_dialect = _normalize_dialect_for_sqlglot(parsed_statement.dialect)
 
-        return SQL(parsed_statement.sql, dialect=sqlglot_dialect, **sql_kwargs)
+        return SQL(parsed_statement.sql, dialect=sqlglot_dialect)
 
     def get_file(self, path: Union[str, Path]) -> "Optional[SQLFile]":
         """Get a loaded SQLFile object by path.
diff --git a/tests/fixtures/asset_maintenance.sql b/tests/fixtures/asset_maintenance.sql
new file mode 100644
index 00000000..b8d97513
--- /dev/null
+++ b/tests/fixtures/asset_maintenance.sql
@@ -0,0 +1,12 @@
+-- name: asset_maintenance_alert
+-- Get a list of maintenances that are happening between 2 dates and insert the alert to be sent into the database, returns inserted data
+with inserted_data as (
+insert into alert_users (user_id, asset_maintenance_id, alert_definition_id)
+select responsible_id, id, (select id from alert_definition where name = 'maintenances_today') from asset_maintenance
+where planned_date_start is not null
+and planned_date_start between :date_start and :date_end
+and cancelled = False ON CONFLICT ON CONSTRAINT unique_alert DO NOTHING
+returning *)
+select inserted_data.*, to_jsonb(users.*) as user
+from inserted_data
+left join users on users.id = inserted_data.user_id;
diff --git a/tests/fixtures/oracle.ddl.sql b/tests/fixtures/oracle.ddl.sql
index aa900013..98a2e904 100644
--- a/tests/fixtures/oracle.ddl.sql
+++ b/tests/fixtures/oracle.ddl.sql
@@ -1,3 +1,4 @@
+-- name: oracle-sysdba-ddl
 -- Oracle 23AI Database Schema for Coffee Recommendation System
 -- This script creates all necessary tables with Oracle 23AI features
 
diff --git a/tests/unit/test_base_sql_integration.py b/tests/unit/test_base_sql_integration.py
new file mode 100644
index 00000000..f7a3b16f
--- /dev/null
+++ b/tests/unit/test_base_sql_integration.py
@@ -0,0 +1,318 @@
+"""Unit tests for SQLSpec SQL loading integration.
+
+Tests the integration of SQLFileLoader functionality into the SQLSpec base class,
+ensuring that all SQL loading methods work correctly and don't interfere with
+existing database configuration functionality.
+""" + +import tempfile +from pathlib import Path +from unittest.mock import Mock, patch + +import pytest + +from sqlspec.base import SQLSpec +from sqlspec.core.statement import SQL +from sqlspec.exceptions import SQLFileNotFoundError +from sqlspec.loader import SQLFileLoader + + +class TestSQLSpecSQLIntegration: + """Test SQLSpec SQL loading integration.""" + + def test_init_without_loader(self) -> None: + """Test SQLSpec initialization without a loader.""" + sql_spec = SQLSpec() + assert sql_spec._sql_loader is None + + def test_init_with_loader(self) -> None: + """Test SQLSpec initialization with a provided loader.""" + loader = SQLFileLoader() + sql_spec = SQLSpec(loader=loader) + assert sql_spec._sql_loader is loader + + def test_lazy_loader_initialization(self) -> None: + """Test that loader is created lazily when first needed.""" + sql_spec = SQLSpec() + assert sql_spec._sql_loader is None + + # First call should create the loader + loader = sql_spec._ensure_sql_loader() + assert isinstance(loader, SQLFileLoader) + assert sql_spec._sql_loader is loader + + # Second call should return the same loader + loader2 = sql_spec._ensure_sql_loader() + assert loader2 is loader + + def test_add_named_sql(self) -> None: + """Test adding a named SQL query directly.""" + sql_spec = SQLSpec() + + # Add a simple query + sql_spec.add_named_sql("test_query", "SELECT 1 AS result") + + # Should be able to retrieve it + assert sql_spec.has_sql_query("test_query") + sql_obj = sql_spec.get_sql("test_query") + assert isinstance(sql_obj, SQL) + assert "SELECT 1 AS result" in sql_obj.sql + + def test_add_named_sql_with_dialect(self) -> None: + """Test adding a named SQL query with dialect.""" + sql_spec = SQLSpec() + + sql_spec.add_named_sql("postgres_query", "SELECT ARRAY_AGG(name) FROM users", dialect="postgres") + + assert sql_spec.has_sql_query("postgres_query") + sql_obj = sql_spec.get_sql("postgres_query") + assert isinstance(sql_obj, SQL) + + def test_get_sql_not_found(self) -> None: + """Test getting a SQL query that doesn't exist.""" + sql_spec = SQLSpec() + + with pytest.raises(SQLFileNotFoundError): + sql_spec.get_sql("nonexistent_query") + + def test_list_sql_queries_empty(self) -> None: + """Test listing queries when none are loaded.""" + sql_spec = SQLSpec() + assert sql_spec.list_sql_queries() == [] + + def test_list_sql_queries_with_queries(self) -> None: + """Test listing queries after adding some.""" + sql_spec = SQLSpec() + + sql_spec.add_named_sql("query_a", "SELECT 1") + sql_spec.add_named_sql("query_b", "SELECT 2") + + queries = sql_spec.list_sql_queries() + assert sorted(queries) == ["query_a", "query_b"] + + def test_has_sql_query_empty(self) -> None: + """Test checking for query existence when none are loaded.""" + sql_spec = SQLSpec() + assert not sql_spec.has_sql_query("any_query") + + def test_has_sql_query_with_queries(self) -> None: + """Test checking for query existence after adding some.""" + sql_spec = SQLSpec() + + sql_spec.add_named_sql("existing_query", "SELECT 1") + + assert sql_spec.has_sql_query("existing_query") + assert not sql_spec.has_sql_query("nonexistent_query") + + def test_clear_sql_cache_no_loader(self) -> None: + """Test clearing cache when no loader exists.""" + sql_spec = SQLSpec() + # Should not raise an error + sql_spec.clear_sql_cache() + + def test_clear_sql_cache_with_loader(self) -> None: + """Test clearing cache with existing loader.""" + sql_spec = SQLSpec() + sql_spec.add_named_sql("test_query", "SELECT 1") + + # Verify query exists + assert 
sql_spec.has_sql_query("test_query") + + # Clear cache + sql_spec.clear_sql_cache() + + # Query should be gone + assert not sql_spec.has_sql_query("test_query") + + def test_reload_sql_files_no_loader(self) -> None: + """Test reloading files when no loader exists.""" + sql_spec = SQLSpec() + # Should not raise an error + sql_spec.reload_sql_files() + + def test_reload_sql_files_with_loader(self) -> None: + """Test reloading files with existing loader.""" + sql_spec = SQLSpec() + sql_spec.add_named_sql("test_query", "SELECT 1") + + # Verify query exists + assert sql_spec.has_sql_query("test_query") + + # Reload (which clears cache) + sql_spec.reload_sql_files() + + # Query should be gone + assert not sql_spec.has_sql_query("test_query") + + def test_get_sql_files_empty(self) -> None: + """Test getting file list when none are loaded.""" + sql_spec = SQLSpec() + assert sql_spec.get_sql_files() == [] + + def test_load_sql_files(self) -> None: + """Test loading SQL files from a directory.""" + sql_spec = SQLSpec() + + # Create a temporary SQL file + with tempfile.NamedTemporaryFile(mode="w", suffix=".sql", delete=False) as tf: + tf.write(""" +-- name: test_query +SELECT id, name FROM users WHERE active = true; + +-- name: count_users +SELECT COUNT(*) as total FROM users; +""") + tf.flush() + temp_path = Path(tf.name) + + try: + # Load the file + sql_spec.load_sql_files(temp_path) + + # Verify queries were loaded + queries = sql_spec.list_sql_queries() + assert "test_query" in queries + assert "count_users" in queries + + # Verify we can retrieve the queries + test_sql = sql_spec.get_sql("test_query") + assert isinstance(test_sql, SQL) + assert "SELECT id, name FROM users" in test_sql.sql + + finally: + # Clean up + temp_path.unlink() + + def test_provided_loader_is_used(self) -> None: + """Test that a provided loader is used instead of creating a new one.""" + # Create a mock loader + mock_loader = Mock(spec=SQLFileLoader) + mock_loader.list_queries.return_value = ["mock_query"] + mock_loader.has_query.return_value = True + + sql_spec = SQLSpec(loader=mock_loader) + + # Test that the mock loader is used + queries = sql_spec.list_sql_queries() + assert queries == ["mock_query"] + mock_loader.list_queries.assert_called_once() + + has_query = sql_spec.has_sql_query("test") + assert has_query is True + mock_loader.has_query.assert_called_once_with("test") + + def test_sql_integration_with_existing_functionality(self) -> None: + """Test that SQL loading doesn't interfere with existing SQLSpec functionality.""" + from sqlspec.adapters.sqlite import SqliteConfig + + sql_spec = SQLSpec() + + # Add a database configuration + config = SqliteConfig(pool_config={"database": ":memory:"}) + sql_spec.add_config(config) + + # Add some SQL queries + sql_spec.add_named_sql("get_users", "SELECT * FROM users") + sql_spec.add_named_sql("count_users", "SELECT COUNT(*) FROM users") + + # Verify both database and SQL functionality work + retrieved_config = sql_spec.get_config(SqliteConfig) + assert retrieved_config is config + + assert sql_spec.has_sql_query("get_users") + sql_obj = sql_spec.get_sql("get_users") + assert isinstance(sql_obj, SQL) + + # Test that we can get a session and it has the expected interface + with sql_spec.provide_session(config) as session: + # Should be able to execute SQL through the session + assert hasattr(session, "execute") + + def test_sql_loader_cleanup_on_cache_clear(self) -> None: + """Test proper cleanup when clearing SQL cache.""" + sql_spec = SQLSpec() + + # Add some queries to 
create loader + sql_spec.add_named_sql("query1", "SELECT 1") + sql_spec.add_named_sql("query2", "SELECT 2") + + # Verify loader exists and has queries + assert sql_spec._sql_loader is not None + assert len(sql_spec.list_sql_queries()) == 2 + + # Clear cache + sql_spec.clear_sql_cache() + + # Loader should still exist but queries should be gone + assert sql_spec._sql_loader is not None + assert len(sql_spec.list_sql_queries()) == 0 + + @patch("sqlspec.base.logger") + def test_logging_integration(self, mock_logger: Mock) -> None: + """Test that SQL operations are properly logged.""" + sql_spec = SQLSpec() + + # Test add_named_sql logging + sql_spec.add_named_sql("test_query", "SELECT 1") + mock_logger.debug.assert_called_with("Added named SQL: %s", "test_query") + + # Test load_sql_files logging + with tempfile.NamedTemporaryFile(mode="w", suffix=".sql", delete=False) as tf: + tf.write("-- name: file_query\nSELECT 1;") + tf.flush() + temp_path = Path(tf.name) + + try: + sql_spec.load_sql_files(temp_path) + mock_logger.debug.assert_called_with("Loaded SQL files: %s", (temp_path,)) + finally: + temp_path.unlink() + + # Test clear_sql_cache logging + sql_spec.clear_sql_cache() + mock_logger.debug.assert_called_with("Cleared SQL cache") + + def test_backwards_compatibility(self) -> None: + """Test that existing SQLSpec usage patterns still work.""" + # This test ensures we haven't broken existing functionality + from sqlspec.adapters.sqlite import SqliteConfig + + # Original usage pattern should still work + sql_spec = SQLSpec() + config = SqliteConfig(pool_config={"database": ":memory:"}) + sql_spec.add_config(config) + + # Should be able to get connection and session as before + with sql_spec.provide_session(config) as session: + assert hasattr(session, "execute") + + # Cache functionality should still work + original_cache_config = sql_spec.get_cache_config() + assert original_cache_config is not None + + # New SQL functionality should be additive + sql_spec.add_named_sql("new_query", "SELECT 1") + assert sql_spec.has_sql_query("new_query") + + def test_error_propagation(self) -> None: + """Test that SQL loader errors are properly propagated.""" + sql_spec = SQLSpec() + + # Test error from underlying loader + with pytest.raises(ValueError, match="already exists"): + sql_spec.add_named_sql("duplicate", "SELECT 1") + sql_spec.add_named_sql("duplicate", "SELECT 2") # Should raise + + def test_name_normalization_consistency(self) -> None: + """Test that name normalization works consistently.""" + sql_spec = SQLSpec() + + # Add query with hyphens + sql_spec.add_named_sql("user-profile-query", "SELECT * FROM user_profiles") + + # Should be able to find it with normalized name + assert sql_spec.has_sql_query("user_profile_query") + assert sql_spec.has_sql_query("user-profile-query") # Original name should also work + + sql_obj = sql_spec.get_sql("user_profile_query") + assert isinstance(sql_obj, SQL) diff --git a/tests/unit/test_loader/test_fixtures_directory_loading.py b/tests/unit/test_loader/test_fixtures_directory_loading.py new file mode 100644 index 00000000..392bbb55 --- /dev/null +++ b/tests/unit/test_loader/test_fixtures_directory_loading.py @@ -0,0 +1,691 @@ +"""Comprehensive tests for loading entire fixtures directory. 
+ +Tests the SQLFileLoader's ability to handle real-world SQL files including: +- Complex PostgreSQL and MySQL queries +- Multiple parameter styles (:param, @param) +- CTEs and advanced SQL features +- Directory structure with namespaces +- Mixed dialect SQL files +""" + +import time +from pathlib import Path +from typing import Any + +import pytest + +from sqlspec.core.statement import SQL +from sqlspec.loader import SQLFileLoader + +try: + from rich.console import Console + + console = Console() +except ImportError: + # Fallback if rich is not available + class MockConsole: + def print(self, *args: Any, **kwargs: Any) -> None: + pass + + console = MockConsole() + + +@pytest.fixture +def fixtures_path() -> Path: + """Get path to test fixtures directory.""" + return Path(__file__).parent.parent.parent / "fixtures" + + +def test_load_entire_fixtures_directory(fixtures_path: Path) -> None: + """Test loading the entire fixtures directory successfully.""" + loader = SQLFileLoader() + + # Load the entire fixtures directory + start_time = time.perf_counter() + try: + loader.load_sql(fixtures_path) + load_time = time.perf_counter() - start_time + except Exception as e: + # If there are storage backend issues, test with individual files + pytest.skip(f"Storage backend issue, skipping directory test: {e}") + return + + # Should complete in reasonable time + assert load_time < 5.0, f"Loading took too long: {load_time:.3f}s" + + # Should have loaded queries + queries = loader.list_queries() + assert len(queries) > 0, "No queries were loaded" + + # Should have queries from different subdirectories (namespaces) + postgres_queries = [q for q in queries if q.startswith("postgres.")] + mysql_queries = [q for q in queries if q.startswith("mysql.")] + root_queries = [q for q in queries if "." 
not in q] + + assert len(postgres_queries) > 0, "No PostgreSQL queries found" + assert len(mysql_queries) > 0, "No MySQL queries found" + assert len(root_queries) > 0, "No root-level queries found" + + console.print(f"[green]✓[/green] Loaded {len(queries)} queries in {load_time:.3f}s") + console.print(f" • {len(postgres_queries)} PostgreSQL queries") + console.print(f" • {len(mysql_queries)} MySQL queries") + console.print(f" • {len(root_queries)} root-level queries") + + +def test_complex_postgresql_queries(fixtures_path: Path) -> None: + """Test that complex PostgreSQL queries load and create valid SQL objects.""" + loader = SQLFileLoader() + try: + loader.load_sql(fixtures_path) + except Exception: + pytest.skip("Storage backend issue, skipping test") + return + + # Get all PostgreSQL queries and test for complexity + queries = loader.list_queries() + postgres_queries = [q for q in queries if q.startswith("postgres.")] + + found_complex = 0 + for query_name in postgres_queries: + sql = loader.get_sql(query_name) + assert isinstance(sql, SQL) + assert len(sql.sql.strip()) > 0 + + # Should contain typical PostgreSQL patterns + sql_text = sql.sql.upper() + # CTEs, complex joins, or PostgreSQL-specific functions + if any( + pattern in sql_text + for pattern in ["WITH", "CTE", "PG_", "CURRENT_DATABASE", "ARRAY_AGG", "INFORMATION_SCHEMA", "SELECT"] + ): + found_complex += 1 + if found_complex >= 3: # Stop after finding a few + break + + # Should find at least one complex query + assert found_complex > 0, "No complex PostgreSQL queries found" + console.print(f"[green]✓[/green] Validated {found_complex} complex PostgreSQL queries") + + +def test_complex_mysql_queries(fixtures_path: Path) -> None: + """Test that complex MySQL queries load and create valid SQL objects.""" + loader = SQLFileLoader() + try: + loader.load_sql(fixtures_path) + except Exception: + pytest.skip("Storage backend issue, skipping test") + return + + # Get all MySQL queries and test for complexity + queries = loader.list_queries() + mysql_queries = [q for q in queries if q.startswith("mysql.")] + + found_complex = 0 + for query_name in mysql_queries: + sql = loader.get_sql(query_name) + assert isinstance(sql, SQL) + assert len(sql.sql.strip()) > 0 + + # Should contain typical MySQL patterns + sql_text = sql.sql.upper() + # MySQL hints, information_schema, or MySQL-specific functions + if any( + pattern in sql_text + for pattern in ["INFORMATION_SCHEMA", "MAX_EXECUTION_TIME", "@", "GROUP_CONCAT", "SELECT"] + ): + found_complex += 1 + if found_complex >= 3: # Stop after finding a few + break + + # Should find at least one complex query + assert found_complex > 0, "No complex MySQL queries found" + console.print(f"[green]✓[/green] Validated {found_complex} complex MySQL queries") + + +def test_parameter_styles_detection(fixtures_path: Path) -> None: + """Test that different parameter styles are preserved correctly.""" + loader = SQLFileLoader() + try: + loader.load_sql(fixtures_path) + except Exception: + pytest.skip("Storage backend issue, skipping test") + return + + queries = loader.list_queries() + + # Find queries with different parameter styles + colon_param_queries = [] + at_param_queries = [] + other_param_queries = [] + + for query_name in queries: + try: + sql = loader.get_sql(query_name) + sql_text = sql.sql + + if ":" in sql_text and any( + pattern in sql_text for pattern in [":PKEY", ":DMA_SOURCE_ID", ":database_name"] + ): + colon_param_queries.append(query_name) + elif "@" in sql_text and any( + pattern in 
sql_text for pattern in ["@PKEY", "@DMA_SOURCE_ID", "@target_schema"] + ): + at_param_queries.append(query_name) + elif any(pattern in sql_text for pattern in ["?", "$1", "%s"]): + other_param_queries.append(query_name) + except Exception: + # Skip queries that can't be retrieved + continue + + # Report findings + console.print(f"[blue]Found {len(colon_param_queries)} queries with colon parameters[/blue]") + console.print(f"[blue]Found {len(at_param_queries)} queries with at parameters[/blue]") + console.print(f"[blue]Found {len(other_param_queries)} queries with other parameter styles[/blue]") + + +def test_namespace_organization(fixtures_path: Path) -> None: + """Test that directory structure creates proper namespaces.""" + loader = SQLFileLoader() + try: + loader.load_sql(fixtures_path) + except Exception: + pytest.skip("Storage backend issue, skipping test") + return + + queries = loader.list_queries() + + # Group by namespace + namespaces: dict[str, list[str]] = {} + for query in queries: + if "." in query: + namespace = query.split(".")[0] + namespaces.setdefault(namespace, []).append(query) + else: + namespaces.setdefault("root", []).append(query) + + # Should have some organized structure + assert len(namespaces) > 0, "No namespaces found" + + console.print("[bold]Namespaces found:[/bold]") + for namespace, ns_queries in namespaces.items(): + console.print(f" • [cyan]{namespace}[/cyan]: {len(ns_queries)} queries") + + +def test_asset_maintenance_query(fixtures_path: Path) -> None: + """Test the specific asset maintenance query we created.""" + loader = SQLFileLoader() + try: + loader.load_sql(fixtures_path) + except Exception: + pytest.skip("Storage backend issue, skipping test") + return + + if loader.has_query("asset_maintenance_alert"): + sql = loader.get_sql("asset_maintenance_alert") + + assert isinstance(sql, SQL) + assert "inserted_data" in sql.sql + assert ":date_start" in sql.sql + assert ":date_end" in sql.sql + assert "alert_users" in sql.sql + assert "CONFLICT" in sql.sql.upper() + console.print("[green]✓[/green] Asset maintenance query validated") + else: + console.print("[yellow]Asset maintenance query not found in fixtures[/yellow]") + + +def test_query_text_retrieval(fixtures_path: Path) -> None: + """Test retrieving raw SQL text for queries.""" + loader = SQLFileLoader() + try: + loader.load_sql(fixtures_path) + except Exception: + pytest.skip("Storage backend issue, skipping test") + return + + queries = loader.list_queries() + sample_queries = queries[:5] if len(queries) >= 5 else queries + + tested_count = 0 + for query_name in sample_queries: + try: + # Test get_query_text + text = loader.get_query_text(query_name) + assert isinstance(text, str) + assert len(text.strip()) > 0 + + # Should match the SQL object's text + sql = loader.get_sql(query_name) + assert text == sql.sql + tested_count += 1 + except Exception: + # Skip queries that can't be retrieved + continue + + assert tested_count > 0, "No queries could be tested for text retrieval" + console.print(f"[green]✓[/green] Validated text retrieval for {tested_count} queries") + + +def test_file_metadata_tracking(fixtures_path: Path) -> None: + """Test that file metadata is properly tracked.""" + loader = SQLFileLoader() + try: + loader.load_sql(fixtures_path) + except Exception: + pytest.skip("Storage backend issue, skipping test") + return + + files = loader.list_files() + queries = loader.list_queries() + + assert len(files) > 0, "No files tracked" + + # Every query should have a source file + 
sample_queries = queries[:10] if len(queries) >= 10 else queries + tested_count = 0 + for query_name in sample_queries: + try: + file_info = loader.get_file_for_query(query_name) + if file_info is not None: + assert file_info.path in files, f"File {file_info.path} not in files list" + tested_count += 1 + except Exception: + # Skip queries that can't be retrieved + continue + + assert tested_count > 0, "No queries could be tested for file metadata" + console.print(f"[green]✓[/green] Validated metadata for {tested_count} queries from {len(files)} files") + + +def test_performance_benchmarks(fixtures_path: Path) -> None: + """Test that loading performance meets expectations.""" + loader = SQLFileLoader() + + # Measure loading time + start_time = time.perf_counter() + try: + loader.load_sql(fixtures_path) + load_time = time.perf_counter() - start_time + except Exception: + pytest.skip("Storage backend issue, skipping test") + return + + queries = loader.list_queries() + + # Performance expectations + assert load_time < 5.0, f"Loading too slow: {load_time:.3f}s" + assert len(queries) > 0, "No queries loaded" + + # Measure query retrieval time if we have queries + if queries: + sample_queries = queries[:20] if len(queries) >= 20 else queries + start_time = time.perf_counter() + successful_retrievals = 0 + for query_name in sample_queries: + try: + loader.get_sql(query_name) + successful_retrievals += 1 + except Exception: + # Skip queries that can't be retrieved + continue + retrieval_time = time.perf_counter() - start_time + + if successful_retrievals > 0: + avg_retrieval_time = retrieval_time / successful_retrievals + assert avg_retrieval_time < 0.01, f"Query retrieval too slow: {avg_retrieval_time:.6f}s per query" + + console.print("[green]Performance metrics:[/green]") + console.print(f" • Load time: {load_time:.3f}s for {len(queries)} queries") + console.print( + f" • Avg retrieval: {avg_retrieval_time:.6f}s per query ({successful_retrievals} successful)" + ) + else: + console.print("[yellow]Warning: No queries could be retrieved for performance testing[/yellow]") + + +def test_reload_and_cache_behavior(fixtures_path: Path) -> None: + """Test reloading behavior and cache efficiency.""" + loader = SQLFileLoader() + + # First load + start_time = time.perf_counter() + try: + loader.load_sql(fixtures_path) + first_load_time = time.perf_counter() - start_time + first_query_count = len(loader.list_queries()) + except Exception: + pytest.skip("Storage backend issue, skipping test") + return + + # Second load (should use cache where possible) + start_time = time.perf_counter() + loader.load_sql(fixtures_path) + second_load_time = time.perf_counter() - start_time + second_query_count = len(loader.list_queries()) + + # Query count should be the same + assert first_query_count == second_query_count + + # Second load might be faster due to caching, but not required + console.print(f"[dim]Load times: first={first_load_time:.3f}s, second={second_load_time:.3f}s[/dim]") + + +def test_mixed_dialect_queries(fixtures_path: Path) -> None: + """Test that queries from different SQL dialects coexist properly.""" + loader = SQLFileLoader() + try: + loader.load_sql(fixtures_path) + except Exception: + pytest.skip("Storage backend issue, skipping test") + return + + queries = loader.list_queries() + + # Get samples from different namespaces/dialects + postgres_query = next((q for q in queries if q.startswith("postgres.")), None) + mysql_query = next((q for q in queries if q.startswith("mysql.")), None) + + if 
postgres_query and mysql_query: + try: + # Both should create valid SQL objects + pg_sql = loader.get_sql(postgres_query) + mysql_sql = loader.get_sql(mysql_query) + + assert isinstance(pg_sql, SQL) + assert isinstance(mysql_sql, SQL) + + # Should have different characteristics + assert pg_sql.sql != mysql_sql.sql + + console.print(f"[green]✓[/green] Tested mixed dialects: {postgres_query}, {mysql_query}") + except Exception as e: + console.print(f"[yellow]Could not retrieve mixed dialect queries: {e}[/yellow]") + else: + console.print("[yellow]Could not find both PostgreSQL and MySQL queries for mixed dialect test[/yellow]") + + +def test_specific_real_world_patterns(fixtures_path: Path) -> None: + """Test specific real-world SQL patterns found in fixtures.""" + loader = SQLFileLoader() + try: + loader.load_sql(fixtures_path) + except Exception: + pytest.skip("Storage backend issue, skipping test") + return + + queries = loader.list_queries() + + # Count different SQL pattern occurrences + pattern_counts = { + "ctes": 0, # Common Table Expressions + "hints": 0, # MySQL hints + "params_colon": 0, # :parameter style + "params_at": 0, # @parameter style + "pg_functions": 0, # PostgreSQL specific functions + "info_schema": 0, # information_schema usage + "selects": 0, # Basic SELECT statements + } + + for query_name in queries: + try: + sql = loader.get_sql(query_name) + sql_text = sql.sql.upper() + original_sql = sql.sql + + if "WITH " in sql_text: + pattern_counts["ctes"] += 1 + if "/*+" in original_sql: # Preserve case for hints + pattern_counts["hints"] += 1 + if ":" in original_sql: + pattern_counts["params_colon"] += 1 + if "@" in original_sql: + pattern_counts["params_at"] += 1 + if "PG_" in sql_text or "CURRENT_DATABASE" in sql_text: + pattern_counts["pg_functions"] += 1 + if "INFORMATION_SCHEMA" in sql_text: + pattern_counts["info_schema"] += 1 + if "SELECT" in sql_text: + pattern_counts["selects"] += 1 + except Exception: + # Skip queries that can't be retrieved + continue + + # At least some patterns should be found + total_patterns = sum(pattern_counts.values()) + assert total_patterns > 0, "No real-world SQL patterns found" + + console.print("[bold]Real-world patterns found:[/bold]") + for pattern, count in pattern_counts.items(): + console.print(f" • [yellow]{pattern}[/yellow]: {count} queries") + + +def test_simulated_complex_queries() -> None: + """Test with simulated complex queries based on fixtures content.""" + loader = SQLFileLoader() + + # Add realistic PostgreSQL query with CTEs and parameters + postgres_cte_query = """ +with db as ( + select db.oid as database_oid, + db.datname as database_name, + pg_database_size(db.datname) as total_disk_size_bytes + from pg_database db + where datname = current_database() +), +db_stats as ( + select s.datid as database_oid, + s.numbackends as backends_connected, + s.xact_commit as txn_commit_count + from pg_stat_database s + where s.datname = :database_name +) +select db.*, stats.backends_connected +from db +join db_stats stats on db.database_oid = stats.database_oid +where db.database_oid = :target_oid +""" + + # Add realistic MySQL query with hints and different parameter style + mysql_query = """ +select + /*+ MAX_EXECUTION_TIME(5000) */ + @PKEY as pkey, + @DMA_SOURCE_ID as dma_source_id, + src.table_schema as table_schema, + src.total_table_count as total_table_count +from ( + select + table_schema, + count(*) as total_table_count, + sum(case when engine = 'InnoDB' then 1 else 0 end) as innodb_table_count + from 
information_schema.tables + where table_schema = @target_schema + group by table_schema +) src +""" + + # Add complex insert query with CONFLICT handling (like our asset maintenance) + conflict_query = """ +with inserted_data as ( +insert into alert_users (user_id, asset_maintenance_id, alert_definition_id) +select responsible_id, id, + (select id from alert_definition where name = 'maintenances_today') +from asset_maintenance +where planned_date_start between :date_start and :date_end + and cancelled = False +ON CONFLICT ON CONSTRAINT unique_alert DO NOTHING +returning *) +select inserted_data.*, to_jsonb(users.*) as user +from inserted_data +left join users on users.id = inserted_data.user_id +""" + + # Load all the queries + loader.add_named_sql("postgres_cte_complex", postgres_cte_query.strip()) + loader.add_named_sql("mysql_hint_complex", mysql_query.strip()) + loader.add_named_sql("conflict_handling_complex", conflict_query.strip()) + + # Test each query + queries = loader.list_queries() + assert len(queries) == 3 + + # PostgreSQL query tests + pg_sql = loader.get_sql("postgres_cte_complex") + assert isinstance(pg_sql, SQL) + assert "WITH" in pg_sql.sql.upper() + assert ":database_name" in pg_sql.sql + assert ":target_oid" in pg_sql.sql + assert "pg_database_size" in pg_sql.sql + + # MySQL query tests + mysql_sql = loader.get_sql("mysql_hint_complex") + assert isinstance(mysql_sql, SQL) + assert "/*+" in mysql_sql.sql # MySQL hint + assert "@PKEY" in mysql_sql.sql + assert "@DMA_SOURCE_ID" in mysql_sql.sql + assert "information_schema" in mysql_sql.sql.lower() + + # Conflict handling query tests + conflict_sql = loader.get_sql("conflict_handling_complex") + assert isinstance(conflict_sql, SQL) + assert "CONFLICT" in conflict_sql.sql.upper() + assert ":date_start" in conflict_sql.sql + assert ":date_end" in conflict_sql.sql + assert "to_jsonb" in conflict_sql.sql + + # Verify no parameters are pre-loaded + for sql_obj in [pg_sql, mysql_sql, conflict_sql]: + assert sql_obj.parameters == [] + + console.print("[green]✓[/green] All simulated complex queries loaded and validated successfully") + + +def test_query_name_normalization_with_hyphens() -> None: + """Test that fixture-style query names with hyphens are normalized properly.""" + loader = SQLFileLoader() + + # Simulate names from actual fixtures + fixture_names = [ + "collection-postgres-base-database-details", + "collection-mysql-database-details", + "collection-aws-extension-dependency", + "asset-maintenance-alert", + ] + + for name in fixture_names: + loader.add_named_sql(name, f"SELECT '{name}' as query_name") + + # All should be accessible with hyphens or underscores + for original_name in fixture_names: + underscore_name = original_name.replace("-", "_") + + # The normalized (underscore) version should be stored + assert loader.has_query(underscore_name), f"Should have normalized name: {underscore_name}" + + # Should also be accessible with original hyphenated name (via normalization) + assert loader.has_query(original_name), f"Should normalize and find: {original_name}" + + # Should return same SQL object + sql1 = loader.get_sql(original_name) + sql2 = loader.get_sql(underscore_name) + assert sql1.sql == sql2.sql + + console.print(f"[green]✓[/green] All {len(fixture_names)} hyphenated names normalize correctly") + + +def test_large_query_handling() -> None: + """Test handling of large, complex SQL queries like those in fixtures.""" + loader = SQLFileLoader() + + # Create a large, complex query similar to fixture patterns + 
large_query = """ +-- Complex query with multiple CTEs, joins, and aggregations +with database_metrics as ( + select + d.oid as database_oid, + d.datname as database_name, + pg_database_size(d.datname) as size_bytes, + pg_stat_get_db_numbackends(d.oid) as active_connections + from pg_database d + where d.datallowconn and not d.datistemplate +), +table_metrics as ( + select + schemaname, + tablename, + n_tup_ins + n_tup_upd + n_tup_del as total_modifications, + n_tup_hot_upd as hot_updates, + n_dead_tup as dead_tuples, + last_vacuum, + last_autovacuum, + last_analyze, + last_autoanalyze + from pg_stat_user_tables + where schemaname not in ('information_schema', 'pg_catalog') +), +index_metrics as ( + select + schemaname, + tablename, + indexrelname, + idx_tup_read, + idx_tup_fetch, + idx_blks_read, + idx_blks_hit, + round(100.0 * idx_blks_hit / nullif(idx_blks_hit + idx_blks_read, 0), 2) as hit_ratio + from pg_stat_user_indexes +), +aggregated_stats as ( + select + dm.database_name, + dm.size_bytes, + dm.active_connections, + count(distinct tm.tablename) as table_count, + sum(tm.total_modifications) as total_table_modifications, + count(distinct im.indexrelname) as index_count, + avg(im.hit_ratio) as avg_index_hit_ratio + from database_metrics dm + cross join table_metrics tm + left join index_metrics im on tm.schemaname = im.schemaname + and tm.tablename = im.tablename + where dm.database_name = current_database() + and tm.total_modifications > :min_modifications + and (im.hit_ratio is null or im.hit_ratio > :min_hit_ratio) + group by dm.database_name, dm.size_bytes, dm.active_connections +) +select + as_.*, + case + when as_.avg_index_hit_ratio > 95 then 'excellent' + when as_.avg_index_hit_ratio > 85 then 'good' + when as_.avg_index_hit_ratio > 70 then 'fair' + else 'poor' + end as performance_rating, + round(as_.size_bytes / 1024.0 / 1024.0, 2) as size_mb +from aggregated_stats as_ +where as_.table_count > 0 +order by as_.size_bytes desc +limit :result_limit +""" + + loader.add_named_sql("large_database_analysis", large_query.strip()) + + # Should handle the large query without issues + sql = loader.get_sql("large_database_analysis") + assert isinstance(sql, SQL) + assert len(sql.sql) > 1000 # Should be a substantial query + assert sql.sql.count("select") >= 4 # Multiple SELECT statements + assert sql.sql.count("with") >= 1 # Has CTEs + assert ":min_modifications" in sql.sql + assert ":min_hit_ratio" in sql.sql + assert ":result_limit" in sql.sql + + # Should parse quickly even for large queries + start_time = time.perf_counter() + for _ in range(100): + loader.get_sql("large_database_analysis") + elapsed = time.perf_counter() - start_time + + assert elapsed < 0.1, f"Large query retrieval too slow: {elapsed:.3f}s for 100 calls" + console.print(f"[green]✓[/green] Large query ({len(sql.sql)} chars) handled efficiently") + console.print(f" • Performance: {elapsed * 1000:.1f}ms for 100 calls ({elapsed * 10.0:.1f}ms per call)") diff --git a/tests/unit/test_loader/test_sql_file_loader.py b/tests/unit/test_loader/test_sql_file_loader.py index 6bc4cba8..0b7c745b 100644 --- a/tests/unit/test_loader/test_sql_file_loader.py +++ b/tests/unit/test_loader/test_sql_file_loader.py @@ -17,7 +17,6 @@ import pytest -from sqlspec.core.parameters import ParameterStyle from sqlspec.core.statement import SQL from sqlspec.exceptions import SQLFileNotFoundError, SQLFileParseError from sqlspec.loader import CachedSQLFile, NamedStatement, SQLFile, SQLFileLoader @@ -460,16 +459,17 @@ def test_get_sql_basic() -> 
None: assert "SELECT * FROM users WHERE id = ?" in sql.sql -def test_get_sql_with_parameters() -> None: - """Test getting SQL with parameters.""" +def test_get_sql_simplified() -> None: + """Test getting SQL without parameters (simplified interface).""" loader = SQLFileLoader() loader.add_named_sql("test_query", "SELECT * FROM users WHERE id = :user_id") - sql = loader.get_sql("test_query", {"user_id": 123}) + sql = loader.get_sql("test_query") assert isinstance(sql, SQL) - # Parameters are wrapped in CORE_ROUND_3 architecture - assert sql.parameters == {"parameters": {"user_id": 123}} + assert "SELECT * FROM users WHERE id = :user_id" in sql.sql + # Parameters should be passed during execution, not creation + assert sql.parameters == [] def test_get_sql_with_dialect() -> None: @@ -485,17 +485,6 @@ def test_get_sql_with_dialect() -> None: # assert sql.dialect == "postgres" -def test_get_sql_with_dialect_override() -> None: - """Test overriding dialect in get_sql.""" - loader = SQLFileLoader() - loader.add_named_sql("test_query", "SELECT * FROM users", dialect="postgres") - - sql = loader.get_sql("test_query", dialect="mysql") - - assert isinstance(sql, SQL) - assert sql._dialect == "mysql" # Override should take precedence - - def test_get_sql_parameter_style_detection() -> None: """Test parameter style detection and preservation.""" loader = SQLFileLoader() @@ -534,6 +523,41 @@ def test_get_sql_name_normalization() -> None: assert isinstance(sql2, SQL) +def test_get_sql_usage_pattern() -> None: + """Test the simplified usage pattern for get_sql method.""" + loader = SQLFileLoader() + + # Add the asset maintenance alert query + asset_maintenance_query = """ +with inserted_data as ( +insert into alert_users (user_id, asset_maintenance_id, alert_definition_id) +select responsible_id, id, (select id from alert_definition where name = 'maintenances_today') from asset_maintenance +where planned_date_start is not null +and planned_date_start between :date_start and :date_end +and cancelled = False ON CONFLICT ON CONSTRAINT unique_alert DO NOTHING +returning *) +select inserted_data.*, to_jsonb(users.*) as user +from inserted_data +left join users on users.id = inserted_data.user_id; +""" + + loader.add_named_sql("asset_maintenance_alert", asset_maintenance_query.strip()) + + # Test the simplified usage pattern + sql = loader.get_sql("asset_maintenance_alert") + + assert isinstance(sql, SQL) + assert "inserted_data" in sql.sql + assert ":date_start" in sql.sql + assert ":date_end" in sql.sql + assert "alert_users" in sql.sql + + # Verify no parameters are pre-loaded in the SQL object + assert sql.parameters == [] + + # The SQL should be ready for execution with parameters passed at runtime + + def test_get_file_methods() -> None: """Test file retrieval methods.""" loader = SQLFileLoader() @@ -557,24 +581,16 @@ def test_get_file_methods() -> None: assert loader.get_file_for_query("nonexistent") is None -@patch("sqlspec.loader.ParameterValidator") -def test_parameter_style_detection_with_validator(mock_validator_class: Mock) -> None: - """Test parameter style detection using ParameterValidator.""" - mock_validator = Mock() - mock_validator.extract_parameters.return_value = [ - Mock(style=ParameterStyle.QMARK), - Mock(style=ParameterStyle.QMARK), - ] - mock_validator_class.return_value = mock_validator - +def test_parameter_style_detection_simplified() -> None: + """Test that SQL objects are created without parameter style detection.""" loader = SQLFileLoader() loader.add_named_sql("test_query", 
"SELECT * FROM users WHERE id = ? AND active = ?") sql = loader.get_sql("test_query") assert isinstance(sql, SQL) - # Should have called parameter validator - mock_validator.extract_parameters.assert_called_once() + # Simplified loader should just create basic SQL object + assert "SELECT * FROM users WHERE id = ? AND active = ?" in sql.sql def test_dialect_normalization() -> None: @@ -956,7 +972,8 @@ def test_fixture_parameter_extraction(self, fixtures_path: Path) -> None: queries = loader.list_queries() test_query = queries[0] # Get first query - # Create SQL with parameters - sql = loader.get_sql(test_query, {"DMA_SOURCE_ID": "test", "PKEY": "pk123"}) + # Create SQL object without parameters + sql = loader.get_sql(test_query) assert isinstance(sql, SQL) - assert sql.parameters is not None + # Parameters should be passed during execution + assert sql.parameters == [] diff --git a/uv.lock b/uv.lock index 5ca05487..b4d7b5dc 100644 --- a/uv.lock +++ b/uv.lock @@ -4657,7 +4657,7 @@ wheels = [ [[package]] name = "sphinx-prompt" -version = "1.10.0" +version = "1.10.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.14'", @@ -4675,9 +4675,9 @@ dependencies = [ { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "urllib3", marker = "python_full_version >= '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c7/2b/8f3a87784e6313e48b4d91dfb4aae1e5af3fa0c94ef9e875eb2e471e1418/sphinx_prompt-1.10.0.tar.gz", hash = "sha256:23dca4c07ade840c9e87089d79d3499040fa524b3c422941427454e215fdd111", size = 5181, upload-time = "2025-06-24T08:32:18.684Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6c/1b/ceb8d295275982121eefc8a4f0b71edf7f26c16f5bc53f4204527f173783/sphinx_prompt-1.10.1.tar.gz", hash = "sha256:2b8a18433ee3d9f2a7c4e51a768fb14c5177cce4bf0e2298ecc068a0ca9918bf", size = 5242, upload-time = "2025-08-13T09:21:35.971Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/5e/f359e06019dbf0d7f8e23f46c535085c7dc367190a7e19456a09a0153a70/sphinx_prompt-1.10.0-py3-none-any.whl", hash = "sha256:d62f7a1aa346225d30222a271dc78997031204a5f199ce5006c14ece0d94b217", size = 5308, upload-time = "2025-06-24T08:32:17.768Z" }, + { url = "https://files.pythonhosted.org/packages/33/23/dca289e67226222ce988f7a4a31cae0c0f4752338ead0c1e509203658080/sphinx_prompt-1.10.1-py3-none-any.whl", hash = "sha256:6cd34abbeedc14446bba76036b1821da142052f7043c5a8a3d6b84565ee54900", size = 7428, upload-time = "2025-08-13T09:21:35.192Z" }, ] [[package]] @@ -4737,7 +4737,7 @@ dependencies = [ { name = "sphinx-jinja2-compat" }, { name = "sphinx-prompt", version = "1.8.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "sphinx-prompt", version = "1.9.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, - { name = "sphinx-prompt", version = "1.10.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "sphinx-prompt", version = "1.10.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "sphinx-tabs" }, { name = "tabulate" }, { name = "typing-extensions" }, @@ -4876,11 +4876,11 @@ asyncio = [ [[package]] name = "sqlglot" -version = "27.6.0" +version = "27.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/35/aa/0c93a9974c0dc0726aa1a3ebc956f88bc5f6694d811b23d9a31085bb812b/sqlglot-27.6.0.tar.gz", hash = "sha256:413bc6c9978710bd97110e9a4363c517dfb92954d0a9fd2922e87c5707395deb", size = 5409152, upload-time = "2025-08-01T16:03:12.516Z" } +sdist = { url = "https://files.pythonhosted.org/packages/46/0a/6baff44ba014f250c16a0707deb9ebd14a384ebc7afecb95cc3ef20e1c7b/sqlglot-27.7.0.tar.gz", hash = "sha256:6bedac6c57d1b89c3ca2c392e0235305ea1a35b43fced91b6ef485dc64877ae1", size = 5414964, upload-time = "2025-08-13T16:06:54.562Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/18/58/8b267b6b3c27ccb16367b49db2ac8c6032939f4fbffdc66dd0831833a7b9/sqlglot-27.6.0-py3-none-any.whl", hash = "sha256:828e6eea4e608f221c14f9e5dbe2f9f73142f11ba289d84f58c56ca44ee49d23", size = 496769, upload-time = "2025-08-01T16:03:09.914Z" }, + { url = "https://files.pythonhosted.org/packages/e5/88/536ae39a5abfcdf8b3343bfc309dbafe1b9b72653a62b11e539f34f3d16f/sqlglot-27.7.0-py3-none-any.whl", hash = "sha256:cc4ee8fb780636a6f2d5b5c6624e0466ba38b0a8e6588f560ee229a71e33234d", size = 499485, upload-time = "2025-08-13T16:06:51.738Z" }, ] [package.optional-dependencies] @@ -4957,7 +4957,7 @@ wheels = [ [[package]] name = "sqlspec" -version = "0.16.2" +version = "0.16.3" source = { editable = "." } dependencies = [ { name = "eval-type-backport", marker = "python_full_version < '3.10'" }, @@ -5624,16 +5624,17 @@ wheels = [ [[package]] name = "virtualenv" -version = "20.33.1" +version = "20.34.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "distlib" }, { name = "filelock" }, { name = "platformdirs" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8b/60/4f20960df6c7b363a18a55ab034c8f2bcd5d9770d1f94f9370ec104c1855/virtualenv-20.33.1.tar.gz", hash = "sha256:1b44478d9e261b3fb8baa5e74a0ca3bc0e05f21aa36167bf9cbf850e542765b8", size = 6082160, upload-time = "2025-08-05T16:10:55.605Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1c/14/37fcdba2808a6c615681cd216fecae00413c9dab44fb2e57805ecf3eaee3/virtualenv-20.34.0.tar.gz", hash = "sha256:44815b2c9dee7ed86e387b842a84f20b93f7f417f95886ca1996a72a4138eb1a", size = 6003808, upload-time = "2025-08-13T14:24:07.464Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ca/ff/ded57ac5ff40a09e6e198550bab075d780941e0b0f83cbeabd087c59383a/virtualenv-20.33.1-py3-none-any.whl", hash = "sha256:07c19bc66c11acab6a5958b815cbcee30891cd1c2ccf53785a28651a0d8d8a67", size = 6060362, upload-time = "2025-08-05T16:10:52.81Z" }, + { url = "https://files.pythonhosted.org/packages/76/06/04c8e804f813cf972e3262f3f8584c232de64f0cde9f703b46cf53a45090/virtualenv-20.34.0-py3-none-any.whl", hash = "sha256:341f5afa7eee943e4984a9207c025feedd768baff6753cd660c857ceb3e36026", size = 5983279, upload-time = "2025-08-13T14:24:05.111Z" }, ] [[package]]