@@ -62,20 +62,12 @@ def sql_fetch_all(ws: WorkspaceClient):
 
 @pytest.fixture
 def make_catalog(sql_exec, make_random):
-    cleanup = []
-
-    def inner():
+    def create():
         name = f"ucx_C{make_random(4)}".lower()
         sql_exec(f"CREATE CATALOG {name}")
-        cleanup.append(name)
         return name
 
-    yield inner
-    logger.debug(f"clearing {len(cleanup)} catalogs")
-    for name in cleanup:
-        logger.debug(f"removing {name} catalog")
-        sql_exec(f"DROP CATALOG IF EXISTS {name} CASCADE")
-    logger.debug(f"removed {len(cleanup)} catalogs")
+    yield from factory("catalog", create, lambda name: sql_exec(f"DROP CATALOG IF EXISTS {name} CASCADE"))  # noqa: F405
 
 
 def test_catalog_fixture(make_catalog):
@@ -85,20 +77,12 @@ def test_catalog_fixture(make_catalog):
 
 @pytest.fixture
 def make_schema(sql_exec, make_random):
-    cleanup = []
-
-    def inner(catalog="hive_metastore"):
+    def create(*, catalog="hive_metastore"):
         name = f"{catalog}.ucx_S{make_random(4)}".lower()
         sql_exec(f"CREATE SCHEMA {name}")
-        cleanup.append(name)
         return name
 
-    yield inner
-    logger.debug(f"clearing {len(cleanup)} schemas")
-    for name in cleanup:
-        logger.debug(f"removing {name} schema")
-        sql_exec(f"DROP SCHEMA IF EXISTS {name} CASCADE")
-    logger.debug(f"removed {len(cleanup)} schemas")
+    yield from factory("schema", create, lambda name: sql_exec(f"DROP SCHEMA IF EXISTS {name} CASCADE"))  # noqa: F405
 
 
 def test_schema_fixture(make_schema):
@@ -108,14 +92,12 @@ def test_schema_fixture(make_schema):
 
 @pytest.fixture
 def make_table(sql_exec, make_schema, make_random):
-    cleanup = []
-
-    def inner(
+    def create(
         *,
         catalog="hive_metastore",
         schema: str | None = None,
         ctas: str | None = None,
-        non_detla: bool = False,
+        non_delta: bool = False,
         external: bool = False,
         view: bool = False,
     ):
@@ -126,7 +108,7 @@ def inner(
         if ctas is not None:
             # temporary (if not view)
             ddl = f"{ddl} AS {ctas}"
-        elif non_detla:
+        elif non_delta:
             location = "dbfs:/databricks-datasets/iot-stream/data-device"
             ddl = f"{ddl} USING json LOCATION '{location}'"
         elif external:
@@ -137,29 +119,25 @@ def inner(
             # managed table
             ddl = f"{ddl} (id INT, value STRING)"
         sql_exec(ddl)
-        cleanup.append(name)
         return name
 
-    yield inner
-
-    logger.debug(f"clearing {len(cleanup)} tables")
-    for name in cleanup:
-        logger.debug(f"removing {name} table")
+    def remove(name):
         try:
             sql_exec(f"DROP TABLE IF EXISTS {name}")
         except RuntimeError as e:
             if "Cannot drop a view" in str(e):
                 sql_exec(f"DROP VIEW IF EXISTS {name}")
             else:
                 raise e
-    logger.debug(f"removed {len(cleanup)} tables")
+
+    yield from factory("table", create, remove)  # noqa: F405
 
 
 def test_table_fixture(make_table):
     logger.info(f"Created new managed table in new schema: {make_table()}")
     logger.info(f'Created new managed table in default schema: {make_table(schema="default")}')
     logger.info(f"Created new external table in new schema: {make_table(external=True)}")
-    logger.info(f"Created new external JSON table in new schema: {make_table(non_detla=True)}")
+    logger.info(f"Created new external JSON table in new schema: {make_table(non_delta=True)}")
     logger.info(f'Created new tmp table in new schema: {make_table(ctas="SELECT 2+2 AS four")}')
     logger.info(f'Created new view in new schema: {make_table(view=True, ctas="SELECT 2+2 AS four")}')
 
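Note: each fixture now delegates its create/track/drop bookkeeping to a shared `factory` generator, star-imported here (hence the `# noqa: F405` on each call). Its body is not part of this diff; below is a minimal sketch of what such a helper could look like, reconstructed from the deleted cleanup loops above, with the `name`, `create`, and `remove` parameter names assumed from the call sites:

import logging

logger = logging.getLogger(__name__)


def factory(name, create, remove):
    # Sketch only: not the actual ucx implementation.
    # Track everything the create() callback produced so it can be
    # torn down after the test finishes.
    cleanup = []

    def inner(**kwargs):
        x = create(**kwargs)
        cleanup.append(x)
        return x

    # The fixture re-yields inner via `yield from factory(...)`, so the
    # test receives the wrapped create() callable; everything after this
    # line runs at fixture teardown.
    yield inner
    logger.debug(f"clearing {len(cleanup)} {name} fixtures")
    for x in cleanup:
        logger.debug(f"removing {name} fixture: {x}")
        remove(x)

Because `inner` forwards `**kwargs`, the same helper serves `make_catalog` (no arguments), `make_schema` (keyword-only `catalog`), and `make_table` (several keyword-only flags) without any per-fixture cleanup code.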