
Commit 20474c3

Fix test failure: test_table_migration_job_refreshes_migration_status[regular-migrate-tables] (#1838)
1 parent eef47a5 commit 20474c3

2 files changed: +18 -5 lines


src/databricks/labs/ucx/mixins/fixtures.py

Lines changed: 16 additions & 3 deletions
@@ -1281,11 +1281,18 @@ def make_dbfs_data_copy(ws, make_cluster, env_or_skip):
     if ws.config.is_aws:
         cmd_exec = CommandExecutor(ws.clusters, ws.command_execution, lambda: env_or_skip("TEST_WILDCARD_CLUSTER_ID"))
 
-    def create(*, src_path: str, dst_path: str):
+    def create(*, src_path: str, dst_path: str, wait_for_provisioning=True):
+        @retried(on=[NotFound], timeout=timedelta(minutes=2))
+        def _wait_for_provisioning(path) -> None:
+            if not ws.dbfs.exists(path):
+                raise NotFound(f"Location not found: {path}")
+
         if ws.config.is_aws:
             cmd_exec.run(f"dbutils.fs.cp('{src_path}', '{dst_path}', recurse=True)")
         else:
             ws.dbfs.copy(src_path, dst_path, recursive=True)
+        if wait_for_provisioning:
+            _wait_for_provisioning(dst_path)
         return dst_path
 
     def remove(dst_path: str):
@@ -1299,8 +1306,14 @@ def remove(dst_path: str):
 
 @pytest.fixture
 def make_mounted_location(make_random, make_dbfs_data_copy, env_or_skip):
-    # make a copy of src data to a new location to avoid overlapping UC table path that will fail other
-    # external table migration tests
+    """Make a copy of source data to a new location
+
+    Use the fixture to avoid overlapping UC table path that will fail other external table migration tests.
+
+    Note:
+        This fixture is different to the other `make_` fixtures as it does not return a `Callable` to make the mounted
+        location; the mounted location is made with fixture setup already.
+    """
     existing_mounted_location = f'dbfs:/mnt/{env_or_skip("TEST_MOUNT_NAME")}/a/b/c'
     new_mounted_location = f'dbfs:/mnt/{env_or_skip("TEST_MOUNT_NAME")}/a/b/{make_random(4)}'
     make_dbfs_data_copy(src_path=existing_mounted_location, dst_path=new_mounted_location)
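The new `wait_for_provisioning` guard handles the case where a freshly copied DBFS location is not yet visible to the workspace: it polls `ws.dbfs.exists` and keeps raising `NotFound` so that `@retried` re-invokes it until the path appears or the timeout expires. Below is a minimal standalone sketch of that retry pattern, assuming an authenticated `WorkspaceClient` named `ws`; the destination path is hypothetical.

from datetime import timedelta

from databricks.sdk import WorkspaceClient
from databricks.sdk.errors import NotFound
from databricks.sdk.retries import retried

ws = WorkspaceClient()  # assumes workspace credentials are already configured


@retried(on=[NotFound], timeout=timedelta(minutes=2))
def wait_for_provisioning(path: str) -> None:
    # @retried re-invokes this function whenever it raises NotFound, until the
    # two-minute timeout elapses; it returns as soon as the path becomes visible.
    if not ws.dbfs.exists(path):
        raise NotFound(f"Location not found: {path}")


# hypothetical destination path, for illustration only
wait_for_provisioning("dbfs:/mnt/example-mount/a/b/abcd")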

tests/integration/hive_metastore/test_workflows.py

Lines changed: 2 additions & 2 deletions
@@ -1,11 +1,11 @@
 from datetime import timedelta
 
 import pytest
-from databricks.sdk.errors import NotFound
+from databricks.sdk.errors import InvalidParameterValue, NotFound
 from databricks.sdk.retries import retried
 
 
-@retried(on=[NotFound], timeout=timedelta(minutes=5))
+@retried(on=[NotFound, InvalidParameterValue], timeout=timedelta(minutes=5))
 @pytest.mark.parametrize(
     "prepare_tables_for_migration,workflow",
     [
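Widening the retry list to include `InvalidParameterValue` means the whole integration test is re-run not only when a resource is missing (`NotFound`) but also when the workspace transiently rejects a request, presumably while the test's job or tables are still being provisioned; the diff itself does not state the exact failure mode. A minimal sketch of the decorator pattern follows, with a hypothetical test name, parameter values, and body.

from datetime import timedelta

import pytest
from databricks.sdk.errors import InvalidParameterValue, NotFound
from databricks.sdk.retries import retried


@retried(on=[NotFound, InvalidParameterValue], timeout=timedelta(minutes=5))
@pytest.mark.parametrize("workflow", ["migrate-tables"])  # illustrative parameter only
def test_workflow_retries_on_transient_errors(workflow):
    # Hypothetical body: the real test deploys UCX, runs the named workflow,
    # and asserts on the migration status. @retried re-runs the whole test
    # function whenever it raises NotFound or InvalidParameterValue, for up
    # to five minutes.
    assert workflow == "migrate-tables"

Note the decorator order matches the diff: `@retried` sits outermost, so the retry wraps the already-parametrized test function and each parametrized case gets its own retry window.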

0 commit comments
