19 changes: 18 additions & 1 deletion databricks/sdk/runtime/__init__.py
@@ -3,6 +3,8 @@
import logging
from typing import Dict, Optional, Union, cast

from databricks.sdk.errors import DatabricksError

logger = logging.getLogger('databricks.sdk')
is_local_implementation = True

@@ -73,6 +75,19 @@ def inner() -> Dict[str, str]:
return None, None


def _is_unexpected_exception_loading_user_namespace(e: Exception) -> bool:
# The dbruntime module is not present outside of DBR
if isinstance(e, ImportError):
return False
# In notebooks, the UserNamespaceInitializer works, but the notebook context is not propagated to
# spawned Python subprocesses, resulting in this class throwing a
# pyspark.errors.exceptions.base.PySparkRuntimeError. The SDK does not depend on PySpark, so we
# need to check the type and module name directly.
if type(e).__name__ == 'PySparkRuntimeError' and e.__module__ == 'pyspark.errors.exceptions.base':
return False
return True


try:
# Internal implementation
# Separated from above for backward compatibility
@@ -85,7 +100,9 @@ def inner() -> Dict[str, str]:
continue
_globals[var] = userNamespaceGlobals[var]
is_local_implementation = False
except ImportError:
except Exception as e:
if _is_unexpected_exception_loading_user_namespace(e):
raise DatabricksError(f"Failed to initialize runtime globals") from e
# OSS implementation
is_local_implementation = True
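
Not part of the diff: a minimal sketch of how the new helper could be exercised in an environment without DBR or PySpark installed. It assumes the function stays importable as a private name from databricks.sdk.runtime; FakePySparkRuntimeError is a stand-in class defined only for this example.

# Sketch only: exercises _is_unexpected_exception_loading_user_namespace without PySpark.
from databricks.sdk.runtime import _is_unexpected_exception_loading_user_namespace


class FakePySparkRuntimeError(Exception):
    """Stand-in for pyspark.errors.exceptions.base.PySparkRuntimeError (for this example only)."""


# Make the stand-in match the name/module check used by the helper.
FakePySparkRuntimeError.__name__ = 'PySparkRuntimeError'
FakePySparkRuntimeError.__module__ = 'pyspark.errors.exceptions.base'

# Expected outside DBR: ImportError is treated as routine.
assert not _is_unexpected_exception_loading_user_namespace(ImportError('dbruntime not available'))
# Expected in spawned subprocesses: a PySparkRuntimeError-shaped exception is treated as routine.
assert not _is_unexpected_exception_loading_user_namespace(FakePySparkRuntimeError('missing notebook context'))
# Anything else should be surfaced as unexpected.
assert _is_unexpected_exception_loading_user_namespace(ValueError('anything else'))

The stand-in works because the helper matches on the exception's class name and module string rather than using an isinstance check, which is what lets the SDK avoid importing PySpark.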
