Skip to content

Commit 4303926

Browse files
authored
Fixed assessment workflow failure for jobs running tasks on existing interactive clusters (#2889)
Fixes #2886
1 parent 29a98eb commit 4303926

File tree

1 file changed

+15
-10
lines changed
  • src/databricks/labs/ucx/source_code/jobs.py

1 file changed

+15
-10
lines changed

src/databricks/labs/ucx/source_code/jobs.py

Lines changed: 15 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717
from databricks.labs.blueprint.paths import DBFSPath
1818
from databricks.labs.lsql.backends import SqlBackend
1919
from databricks.sdk import WorkspaceClient
20-
from databricks.sdk.errors import NotFound
20+
from databricks.sdk.errors import NotFound, ResourceDoesNotExist
2121
from databricks.sdk.service import compute, jobs
2222
from databricks.sdk.service.workspace import Language
2323

@@ -305,13 +305,14 @@ def _register_pipeline_task(self, graph: DependencyGraph):
305305
def _register_existing_cluster_id(self, graph: DependencyGraph):
    """Register the libraries installed on the task's existing interactive cluster.

    Walks the library status list of the referenced cluster and registers each
    installed library on the dependency graph, yielding any ``DependencyProblem``
    items produced along the way. Yields nothing when the task does not reference
    an existing cluster.

    Args:
        graph: the dependency graph the cluster's libraries are registered on.

    Yields:
        DependencyProblem: problems found while registering libraries, plus a
        ``cluster-not-found`` problem when the referenced cluster no longer exists.
    """
    if not self._task.existing_cluster_id:
        return
    try:
        # Load libraries installed on the referred cluster. The cluster may have
        # been deleted since the job was defined, so guard the lookup: a missing
        # cluster is reported as a problem instead of failing the whole assessment.
        library_full_status_list = self._ws.libraries.cluster_status(self._task.existing_cluster_id)
        for library_full_status in library_full_status_list:
            if library_full_status.library:
                yield from self._register_library(graph, library_full_status.library)
    except ResourceDoesNotExist:
        yield DependencyProblem('cluster-not-found', f'Could not find cluster: {self._task.existing_cluster_id}')
315316

316317
def _register_spark_submit_task(self, graph: DependencyGraph): # pylint: disable=unused-argument
317318
if not self._task.spark_submit_task:
@@ -320,8 +321,12 @@ def _register_spark_submit_task(self, graph: DependencyGraph): # pylint: disabl
320321

321322
def _register_cluster_info(self):
322323
if self._task.existing_cluster_id:
323-
cluster_info = self._ws.clusters.get(self._task.existing_cluster_id)
324-
return self._new_job_cluster_metadata(cluster_info)
324+
try:
325+
cluster_info = self._ws.clusters.get(self._task.existing_cluster_id)
326+
return self._new_job_cluster_metadata(cluster_info)
327+
except ResourceDoesNotExist:
328+
message = f'Could not find cluster: {self._task.existing_cluster_id}'
329+
yield DependencyProblem('cluster-not-found', message)
325330
if self._task.new_cluster:
326331
return self._new_job_cluster_metadata(self._task.new_cluster)
327332
if self._task.job_cluster_key:

0 commit comments

Comments (0)