1 change: 1 addition & 0 deletions README.md
@@ -859,6 +859,7 @@ Create a schema and return its info. Remove it after the test. Returns instance
Keyword Arguments:
* `catalog_name` (str): The name of the catalog where the schema will be created. Default is `hive_metastore`.
* `name` (str): The name of the schema. Default is a random string.
* `location` (str): The storage location to use when the schema should be created as a managed schema (see the sketch below).
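
A minimal usage sketch of the new `location` keyword, modelled on the integration test added later in this pull request. The test name is illustrative; `TEST_MOUNT_CONTAINER` and `env_or_skip` come from that test, where `env_or_skip` is assumed to return the environment variable's value or skip the test when it is unset:

```python
def test_schema_with_explicit_location(make_schema, make_random, env_or_skip):
    # Build a unique path under the test container configured via TEST_MOUNT_CONTAINER.
    schema_name = f"dummy_s{make_random(4)}".lower()
    schema_location = f"{env_or_skip('TEST_MOUNT_CONTAINER')}/a/{schema_name}"
    schema = make_schema(name=schema_name, location=schema_location)
    # The returned SchemaInfo carries the location the schema was created with.
    assert schema.storage_location == schema_location
```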

Usage:
```python
18 changes: 11 additions & 7 deletions src/databricks/labs/pytester/fixtures/catalog.py
@@ -241,17 +241,15 @@ def remove(table_info: TableInfo):

@fixture
def make_schema(
sql_backend,
make_random,
log_workspace_link,
watchdog_remove_after,
sql_backend, make_random, log_workspace_link, watchdog_remove_after
) -> Generator[Callable[..., SchemaInfo], None, None]:
"""
Create a schema and return its info. Remove it after the test. Returns instance of `databricks.sdk.service.catalog.SchemaInfo`.

Keyword Arguments:
* `catalog_name` (str): The name of the catalog where the schema will be created. Default is `hive_metastore`.
* `name` (str): The name of the schema. Default is a random string.
* `location` (str): The storage location to use when the schema should be created as a managed schema.

Usage:
```python
@@ -263,11 +261,17 @@ def test_catalog_fixture(make_catalog, make_schema, make_table):
```
"""

def create(*, catalog_name: str = "hive_metastore", name: str | None = None) -> SchemaInfo:
def create(
*, catalog_name: str = "hive_metastore", name: str | None = None, location: str | None = None
) -> SchemaInfo:
name = name or f"dummy_s{make_random(4)}".lower()
full_name = f"{catalog_name}.{name}".lower()
sql_backend.execute(f"CREATE SCHEMA {full_name} WITH DBPROPERTIES (RemoveAfter={watchdog_remove_after})")
schema_info = SchemaInfo(catalog_name=catalog_name, name=name, full_name=full_name)
schema_ddl = f"CREATE SCHEMA {full_name}"
if location:
schema_ddl = f"{schema_ddl} LOCATION '{location}'"
schema_ddl = f"{schema_ddl} WITH DBPROPERTIES (RemoveAfter={watchdog_remove_after})"
sql_backend.execute(schema_ddl)
schema_info = SchemaInfo(catalog_name=catalog_name, name=name, full_name=full_name, storage_location=location)
path = f'explore/data/{schema_info.catalog_name}/{schema_info.name}'
log_workspace_link(f'{schema_info.full_name} schema', path)
return schema_info
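For readers skimming the diff, here is a self-contained sketch of how the updated `create` helper assembles the DDL. The `_schema_ddl` name is hypothetical; the expected strings mirror the unit-test expectations at the end of this pull request:

```python
def _schema_ddl(full_name: str, location: str | None, remove_after: str) -> str:
    # Start with the bare CREATE SCHEMA, splice in LOCATION only when one was
    # requested, then append the watchdog property that marks the schema for cleanup.
    ddl = f"CREATE SCHEMA {full_name}"
    if location:
        ddl = f"{ddl} LOCATION '{location}'"
    return f"{ddl} WITH DBPROPERTIES (RemoveAfter={remove_after})"


assert _schema_ddl("hive_metastore.abc", None, "2024091313") == (
    "CREATE SCHEMA hive_metastore.abc WITH DBPROPERTIES (RemoveAfter=2024091313)"
)
assert _schema_ddl("hive_metastore.abc", "abfss://[email protected]", "2024091313") == (
    "CREATE SCHEMA hive_metastore.abc LOCATION 'abfss://[email protected]' "
    "WITH DBPROPERTIES (RemoveAfter=2024091313)"
)
```

Building the statement incrementally keeps the `LOCATION` clause optional without duplicating the `CREATE SCHEMA` template.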
6 changes: 6 additions & 0 deletions tests/integration/fixtures/test_catalog.py
@@ -17,6 +17,12 @@ def test_schema_fixture(make_schema):
logger.info(f"Created new schema: {make_schema()}")


def test_managed_schema_fixture(make_schema, make_random, env_or_skip):
schema_name = f"dummy_s{make_random(4)}".lower()
schema_location = f"{env_or_skip('TEST_MOUNT_CONTAINER')}/a/{schema_name}"
logger.info(f"Created new schema: {make_schema(location = schema_location)}")


def test_new_managed_table_in_new_schema(make_table):
logger.info(f"Created new managed table in new schema: {make_table()}")

24 changes: 22 additions & 2 deletions tests/unit/fixtures/test_catalog.py
@@ -1,9 +1,15 @@
from unittest.mock import ANY

from databricks.sdk.service.catalog import TableInfo, TableType, DataSourceFormat, FunctionInfo
from databricks.sdk.service.catalog import TableInfo, TableType, DataSourceFormat, FunctionInfo, SchemaInfo

from databricks.labs.pytester.fixtures.unwrap import call_stateful
from databricks.labs.pytester.fixtures.catalog import make_table, make_udf, make_catalog, make_storage_credential
from databricks.labs.pytester.fixtures.catalog import (
make_table,
make_udf,
make_catalog,
make_storage_credential,
make_schema,
)


def test_make_table_no_args():
@@ -137,3 +143,17 @@ def test_storage_credential():
ctx, fn_info = call_stateful(make_storage_credential, credential_name='abc')
assert ctx is not None
assert fn_info is not None


def test_make_schema() -> None:
ctx, info = call_stateful(make_schema, name='abc', location='abfss://[email protected]')
assert ctx['sql_backend'].queries == [
"CREATE SCHEMA hive_metastore.abc LOCATION 'abfss://[email protected]' WITH DBPROPERTIES (RemoveAfter=2024091313)",
"DROP SCHEMA IF EXISTS hive_metastore.abc CASCADE",
]
assert info == SchemaInfo(
catalog_name='hive_metastore',
name='abc',
full_name='hive_metastore.abc',
storage_location='abfss://[email protected]',
)