2121
2222from databricks .labs .ucx .framework .crawlers import StatementExecutionBackend
2323
24- _LOG = logging .getLogger (__name__ )
24+ logger = logging .getLogger (__name__ )
2525
2626
def factory(name, create, remove):
    """Generic fixture factory: yield a maker function and tear down everything it made.

    Yields a callable that forwards ``**kwargs`` to *create*, records the
    created object, and returns it. After the consuming fixture finishes,
    every recorded object is passed to *remove*; teardown is best-effort.

    :param name: human-readable fixture kind, used only in log messages.
    :param create: callable accepting ``**kwargs`` and returning one fixture object.
    :param remove: callable accepting one created object and deleting it.
    """
    cleanup = []

    def inner(**kwargs):
        x = create(**kwargs)
        # Lazy %-style args: the message is only rendered when DEBUG is enabled.
        logger.debug("added %s fixture: %s", name, x)
        cleanup.append(x)
        return x

    yield inner
    logger.debug("clearing %d %s fixtures", len(cleanup), name)
    for x in cleanup:
        try:
            logger.debug("removing %s fixture: %s", name, x)
            remove(x)
        except DatabricksError as e:
            # TODO: fix on the databricks-labs-pytester level
            # Best-effort teardown: the object may already be gone server-side.
            logger.debug("ignoring error while %s %s teardown: %s", name, x, e)
4646
4747@pytest .fixture
@@ -122,7 +122,7 @@ def acc(product_info, debug_env) -> AccountClient:
122122 # Use variables from Unified Auth
123123 # See https://databricks-sdk-py.readthedocs.io/en/latest/authentication.html
124124 product_name , product_version = product_info
125- _LOG .debug (f"Running with { len (debug_env )} env variables" )
125+ logger .debug (f"Running with { len (debug_env )} env variables" )
126126 return AccountClient (
127127 host = debug_env ["DATABRICKS_HOST" ],
128128 account_id = debug_env ["DATABRICKS_ACCOUNT_ID" ],
@@ -408,7 +408,7 @@ def _scim_values(ids: list[str]) -> list[iam.ComplexValue]:
408408 return [iam .ComplexValue (value = x ) for x in ids ]
409409
410410
411- def _make_group (name , interface , make_random ):
411+ def _make_group (name , cfg , interface , make_random ):
412412 def create (
413413 * ,
414414 members : list [str ] | None = None ,
@@ -425,19 +425,24 @@ def create(
425425 if entitlements is not None :
426426 kwargs ["entitlements" ] = _scim_values (entitlements )
427427 # TODO: REQUEST_LIMIT_EXCEEDED: GetUserPermissionsRequest RPC token bucket limit has been exceeded.
428- return interface .create (** kwargs )
428+ group = interface .create (** kwargs )
429+ if cfg .is_account_client :
430+ logger .info (f"Account group { group .display_name } : { cfg .host } /users/groups/{ group .id } /members" )
431+ else :
432+ logger .info (f"Workspace group { group .display_name } : { cfg .host } #setting/accounts/groups/{ group .id } " )
433+ return group
429434
430435 yield from factory (name , create , lambda item : interface .delete (item .id ))
431436
432437
@pytest.fixture
def make_group(ws, make_random):
    """Fixture factory producing workspace-level groups; removed on teardown."""
    workspace_groups = ws.groups
    yield from _make_group("workspace group", ws.config, workspace_groups, make_random)
436441
437442
@pytest.fixture
def make_acc_group(acc, make_random):
    """Fixture factory producing account-level groups; removed on teardown."""
    account_groups = acc.groups
    yield from _make_group("account group", acc.config, account_groups, make_random)
441446
442447
443448@pytest .fixture
@@ -449,7 +454,11 @@ def create(*, name: str | None = None, **kwargs):
449454 kwargs ["definition" ] = json .dumps (
450455 {"spark_conf.spark.databricks.delta.preview.enabled" : {"type" : "fixed" , "value" : "true" }}
451456 )
452- return ws .cluster_policies .create (name , ** kwargs )
457+ cluster_policy = ws .cluster_policies .create (name , ** kwargs )
458+ logger .info (
459+ f"Cluster policy: { ws .config .host } #setting/clusters/cluster-policies/view/{ cluster_policy .policy_id } "
460+ )
461+ return cluster_policy
453462
454463 yield from factory ("cluster policy" , create , lambda item : ws .cluster_policies .delete (item .policy_id ))
455464
@@ -565,7 +574,9 @@ def create(**kwargs):
565574 timeout_seconds = 0 ,
566575 )
567576 ]
568- return ws .jobs .create (** kwargs )
577+ job = ws .jobs .create (** kwargs )
578+ logger .info (f"Job: { ws .config .host } #job/{ job .job_id } " )
579+ return job
569580
570581 yield from factory ("job" , create , lambda item : ws .jobs .delete (item .job_id ))
571582
@@ -726,13 +737,18 @@ def create() -> CatalogInfo:
726737
727738
728739@pytest .fixture
729- def make_schema (sql_backend , make_random ) -> Callable [..., SchemaInfo ]:
740+ def make_schema (ws , sql_backend , make_random ) -> Callable [..., SchemaInfo ]:
730741 def create (* , catalog_name : str = "hive_metastore" , name : str | None = None ) -> SchemaInfo :
731742 if name is None :
732743 name = f"ucx_S{ make_random (4 )} "
733744 full_name = f"{ catalog_name } .{ name } " .lower ()
734745 sql_backend .execute (f"CREATE SCHEMA { full_name } " )
735- return SchemaInfo (catalog_name = catalog_name , name = name , full_name = full_name )
746+ schema_info = SchemaInfo (catalog_name = catalog_name , name = name , full_name = full_name )
747+ logger .info (
748+ f"Schema { schema_info .full_name } : "
749+ f"{ ws .config .host } /explore/data/{ schema_info .catalog_name } /{ schema_info .name } "
750+ )
751+ return schema_info
736752
737753 yield from factory (
738754 "schema" ,
@@ -742,7 +758,7 @@ def create(*, catalog_name: str = "hive_metastore", name: str | None = None) ->
742758
743759
744760@pytest .fixture
745- def make_table (sql_backend , make_schema , make_random ) -> Callable [..., TableInfo ]:
761+ def make_table (ws , sql_backend , make_schema , make_random ) -> Callable [..., TableInfo ]:
746762 def create (
747763 * ,
748764 catalog_name = "hive_metastore" ,
@@ -781,7 +797,12 @@ def create(
781797 ddl = f"{ ddl } TBLPROPERTIES ({ tbl_properties } )"
782798
783799 sql_backend .execute (ddl )
784- return TableInfo (catalog_name = catalog_name , schema_name = schema_name , name = name , full_name = full_name )
800+ table_info = TableInfo (catalog_name = catalog_name , schema_name = schema_name , name = name , full_name = full_name )
801+ logger .info (
802+ f"Table { table_info .full_name } : "
803+ f"{ ws .config .host } /explore/data/{ table_info .catalog_name } /{ table_info .schema_name } /{ table_info .name } "
804+ )
805+ return table_info
785806
786807 def remove (table_info : TableInfo ):
787808 try :
0 commit comments