Skip to content

Commit b4de9b6

Browse files
authored
Improved error handling for migrate-tables workflows (#1674)
## Changes - improved error handling for `migrate-tables` workflows ### Linked issues <!-- DOC: Link issue with a keyword: close, closes, closed, fix, fixes, fixed, resolve, resolves, resolved. See https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword --> Resolves #1673 ### Tests <!-- How is this tested? Please see the checklist below and also describe any other relevant tests --> - [x] manually tested - [x] added unit tests - [x] verified on staging environment (screenshot attached)
1 parent 7997179 commit b4de9b6

File tree

2 files changed

+86
-5
lines changed

2 files changed

+86
-5
lines changed

src/databricks/labs/ucx/hive_metastore/mapping.py

Lines changed: 12 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
from databricks.labs.blueprint.parallel import Threads
99
from databricks.labs.lsql.backends import SqlBackend
1010
from databricks.sdk import WorkspaceClient
11-
from databricks.sdk.errors import BadRequest, NotFound, ResourceConflict
11+
from databricks.sdk.errors import BadRequest, NotFound, ResourceConflict, DatabricksError
1212
from databricks.sdk.service.catalog import TableInfo, SchemaInfo
1313

1414
from databricks.labs.ucx.account.workspaces import WorkspaceInfo
@@ -182,9 +182,13 @@ def _get_table_in_scope_task(self, table_to_migrate: TableToMigrate) -> TableToM
182182
if self.exists_in_uc(table, rule.as_uc_table_key):
183183
logger.info(f"The intended target for {table.key}, {rule.as_uc_table_key}, already exists.")
184184
return None
185-
result = self._sql_backend.fetch(
186-
f"SHOW TBLPROPERTIES {escape_sql_identifier(table.database)}.{escape_sql_identifier(table.name)}"
187-
)
185+
try:
186+
result = self._sql_backend.fetch(
187+
f"SHOW TBLPROPERTIES {escape_sql_identifier(table.database)}.{escape_sql_identifier(table.name)}"
188+
)
189+
except DatabricksError as err:
190+
logger.warning(f"Failed to get properties for Table {table.key}: {err}")
191+
return None
188192
for value in result:
189193
if value["key"] == self.UCX_SKIP_PROPERTY:
190194
logger.info(f"{table.key} is marked to be skipped")
@@ -199,7 +203,10 @@ def _get_table_in_scope_task(self, table_to_migrate: TableToMigrate) -> TableToM
199203
)
200204
return None
201205
logger.info(f"The upgrade_to target for {table.key} is missing. Unsetting the upgrade_to property")
202-
self._sql_backend.execute(table.sql_unset_upgraded_to())
206+
try:
207+
self._sql_backend.execute(table.sql_unset_upgraded_to())
208+
except DatabricksError as err:
209+
logger.warning(f"Failed to unset upgraded_to property for Table {table.key}: {err}")
203210

204211
return table_to_migrate
205212

tests/unit/hive_metastore/test_mapping.py

Lines changed: 74 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -638,3 +638,77 @@ def test_is_target_exists():
638638
)
639639
assert not table_mapping.exists_in_uc(src_table, "cat1.schema1.dest1")
640640
assert table_mapping.exists_in_uc(src_table, "cat1.schema2.dest2")
641+
642+
643+
def test_mapping_broken_table(caplog):
    """A failing SHOW TBLPROPERTIES query must be logged as a warning, not crash."""
    # Backend raises on the first SHOW TBLPROPERTIES call to simulate a broken table.
    backend = MockBackend(fails_on_first={"SHOW TBLPROPERTIES": "tokentoken"})
    client = create_autospec(WorkspaceClient)
    client.tables.get.side_effect = NotFound()
    rule = {
        'catalog_name': 'catalog',
        'dst_schema': 'schema1',
        'dst_table': 'table1',
        'src_schema': 'schema1',
        'src_table': 'table1',
        'workspace_name': 'workspace',
    }
    installation = MockInstallation({'mapping.csv': [rule]})
    tables_crawler = create_autospec(TablesCrawler)
    tables_crawler.snapshot.return_value = [
        Table(
            object_type="EXTERNAL",
            table_format="DELTA",
            catalog="hive_metastore",
            database="schema1",
            name="table1",
        ),
    ]
    table_mapping = TableMapping(installation, client, backend)
    table_mapping.get_tables_to_migrate(tables_crawler)
    # The DatabricksError from the backend is swallowed and surfaced as a warning.
    assert "Failed to get properties for Table hive_metastore.schema1.table1" in caplog.text
675+
676+
677+
def test_table_with_no_target_reverted_failed(caplog):
    """If unsetting the upgraded_to property fails, a warning is logged instead of raising."""
    # The table claims an upgrade target that does not exist, so the mapper
    # tries to unset upgraded_to; the ALTER TABLE for that unset is made to fail.
    backend = MockBackend(
        fails_on_first={"ALTER TABLE": "ALTER_TABLE_FAILED"},
        rows={
            "SHOW TBLPROPERTIES schema1.table1": [
                {"key": "upgraded_to", "value": "non.existing.table"},
            ],
        },
    )
    client = create_autospec(WorkspaceClient)
    client.tables.get.side_effect = NotFound()

    rule = {
        'workspace_name': "fake_ws",
        "catalog_name": 'cat1',
        'src_schema': 'schema1',
        'dst_schema': 'schema1',
        'src_table': 'table1',
        'dst_table': 'table1',
    }
    installation = MockInstallation({'mapping.csv': [rule]})
    tables_crawler = create_autospec(TablesCrawler)
    tables_crawler.snapshot.return_value = [
        Table(
            object_type="EXTERNAL",
            table_format="DELTA",
            catalog="hive_metastore",
            database="schema1",
            name="table1",
        ),
    ]
    table_mapping = TableMapping(installation, client, backend)
    table_mapping.get_tables_to_migrate(tables_crawler)
    assert "Failed to unset upgraded_to property" in caplog.text

0 commit comments

Comments
 (0)