diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md
index 6bd12e9f3..3d1a02e39 100644
--- a/NEXT_CHANGELOG.md
+++ b/NEXT_CHANGELOG.md
@@ -6,6 +6,8 @@
 
 ### Bug Fixes
 
+* Always create a new logger instance, rather than using Python's default global logger instance ([#988](https://github.com/databricks/databricks-sdk-py/pull/988)).
+
 ### Documentation
 
 ### Internal Changes
diff --git a/databricks/sdk/_widgets/__init__.py b/databricks/sdk/_widgets/__init__.py
index 3f9c4eefc..c55ae9ff6 100644
--- a/databricks/sdk/_widgets/__init__.py
+++ b/databricks/sdk/_widgets/__init__.py
@@ -3,6 +3,8 @@
 import warnings
 from abc import ABC, abstractmethod
 
+_LOG = logging.getLogger(__name__)
+
 
 class WidgetUtils(ABC):
@@ -54,7 +56,7 @@ def _remove_all(self):
             )
             == 0
         ):
-            logging.debug("Not in an interactive notebook. Skipping ipywidgets implementation for dbutils.")
+            _LOG.debug("Not in an interactive notebook. Skipping ipywidgets implementation for dbutils.")
             raise EnvironmentError("Not in an interactive notebook.")
 
         # For import errors in IPyWidgetUtil, we provide a warning message, prompting users to install the
@@ -63,7 +65,7 @@ def _remove_all(self):
         from .ipywidgets_utils import IPyWidgetUtil
 
         widget_impl = IPyWidgetUtil
-        logging.debug("Using ipywidgets implementation for dbutils.")
+        _LOG.debug("Using ipywidgets implementation for dbutils.")
     except ImportError as e:
         # Since we are certain that we are in an interactive notebook, we can make assumptions about
@@ -73,11 +75,11 @@ def _remove_all(self):
             "\tpip install 'databricks-sdk[notebook]'\n"
             "Falling back to default_value_only implementation for databricks widgets."
         )
-        logging.debug(f"{e.msg}. Skipping ipywidgets implementation for dbutils.")
+        _LOG.debug(f"{e.msg}. Skipping ipywidgets implementation for dbutils.")
         raise e
 
 except:
     from .default_widgets_utils import DefaultValueOnlyWidgetUtils
 
     widget_impl = DefaultValueOnlyWidgetUtils
-    logging.debug("Using default_value_only implementation for dbutils.")
+    _LOG.debug("Using default_value_only implementation for dbutils.")
diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py
index 2f5121180..b5dec0847 100644
--- a/databricks/sdk/credentials_provider.py
+++ b/databricks/sdk/credentials_provider.py
@@ -22,6 +22,8 @@
 from . import azure, oauth, oidc, oidc_token_supplier
 
+_LOG = logging.getLogger(__name__)
+
 CredentialsProvider = Callable[[], Dict[str, str]]
 
 logger = logging.getLogger("databricks.sdk")
@@ -598,7 +600,7 @@ def _run_subprocess(
         kwargs["shell"] = sys.platform.startswith("win")
         # windows requires shell=True to be able to execute 'az login' or other commands
         # cannot use shell=True all the time, as it breaks macOS
-        logging.debug(f"Running command: {' '.join(popenargs)}")
+        _LOG.debug(f"Running command: {' '.join(popenargs)}")
         return subprocess.run(
             popenargs,
             input=input,
diff --git a/databricks/sdk/errors/customizer.py b/databricks/sdk/errors/customizer.py
index 6a760b626..0893ed9dd 100644
--- a/databricks/sdk/errors/customizer.py
+++ b/databricks/sdk/errors/customizer.py
@@ -3,6 +3,8 @@
 import requests
 
+_LOG = logging.getLogger(__name__)
+
 
 class _ErrorCustomizer(abc.ABC):
     """A customizer for errors from the Databricks REST API."""
@@ -23,7 +25,7 @@ class _RetryAfterCustomizer(_ErrorCustomizer):
     def _parse_retry_after(cls, response: requests.Response) -> int:
         retry_after = response.headers.get("Retry-After")
         if retry_after is None:
-            logging.debug(
+            _LOG.debug(
                 f"No Retry-After header received in response with status code 429 or 503. Defaulting to {cls._DEFAULT_RETRY_AFTER_SECONDS}"
             )
             # 429 requests should include a `Retry-After` header, but if it's missing,
@@ -39,7 +41,7 @@ def _parse_retry_after(cls, response: requests.Response) -> int:
         try:
             return int(retry_after)
         except ValueError:
-            logging.debug(
+            _LOG.debug(
                 f"Invalid Retry-After header received: {retry_after}. Defaulting to {cls._DEFAULT_RETRY_AFTER_SECONDS}"
             )
             # defaulting to 1 sleep second to make self._is_retryable() simpler
diff --git a/databricks/sdk/errors/deserializer.py b/databricks/sdk/errors/deserializer.py
index 5a6e0da09..80272c589 100644
--- a/databricks/sdk/errors/deserializer.py
+++ b/databricks/sdk/errors/deserializer.py
@@ -6,6 +6,8 @@
 import requests
 
+_LOG = logging.getLogger(__name__)
+
 
 class _ErrorDeserializer(abc.ABC):
     """A parser for errors from the Databricks REST API."""
@@ -34,19 +36,19 @@ def deserialize_error(self, response: requests.Response, response_body: bytes) -
             payload_str = response_body.decode("utf-8")
             resp = json.loads(payload_str)
         except UnicodeDecodeError as e:
-            logging.debug(
+            _LOG.debug(
                 "_StandardErrorParser: unable to decode response using utf-8",
                 exc_info=e,
             )
             return None
         except json.JSONDecodeError as e:
-            logging.debug(
+            _LOG.debug(
                 "_StandardErrorParser: unable to deserialize response as json",
                 exc_info=e,
             )
             return None
         if not isinstance(resp, dict):
-            logging.debug("_StandardErrorParser: response is valid JSON but not a dictionary")
+            _LOG.debug("_StandardErrorParser: response is valid JSON but not a dictionary")
             return None
 
         error_args = {
@@ -84,7 +86,7 @@ def deserialize_error(self, response: requests.Response, response_body: bytes) -
         payload_str = response_body.decode("utf-8")
         match = self.__STRING_ERROR_REGEX.match(payload_str)
         if not match:
-            logging.debug("_StringErrorParser: unable to parse response as string")
+            _LOG.debug("_StringErrorParser: unable to parse response as string")
             return None
         error_code, message = match.groups()
         return {
@@ -115,5 +117,5 @@ def deserialize_error(self, response: requests.Response, response_body: bytes) -
                     "message": message,
                     "error_code": response.reason.upper().replace(" ", "_"),
                 }
-        logging.debug("_HtmlErrorParser: no <pre> tag found in error response")
+        _LOG.debug("_HtmlErrorParser: no <pre> tag found in error response")
         return None
diff --git a/databricks/sdk/errors/parser.py b/databricks/sdk/errors/parser.py
index 64d83de05..42c555b40 100644
--- a/databricks/sdk/errors/parser.py
+++ b/databricks/sdk/errors/parser.py
@@ -13,6 +13,8 @@
from .private_link import (_get_private_link_validation_error,
_is_private_link_redirect)
+_LOG = logging.getLogger(__name__)
+
# A list of _ErrorDeserializers that are tried in order to parse an API error from a response body. Most errors should
# be parsable by the _StandardErrorDeserializer, but additional parsers can be added here for specific error formats.
# The order of the parsers is not important, as the set of errors that can be parsed by each parser should be disjoint.
@@ -78,7 +80,7 @@ def get_api_error(self, response: requests.Response) -> Optional[DatabricksError
customizer.customize_error(response, error_args)
return _error_mapper(response, error_args)
except Exception as e:
- logging.debug(
+ _LOG.debug(
f"Error parsing response with {parser}, continuing",
exc_info=e,
)
diff --git a/databricks/sdk/runtime/__init__.py b/databricks/sdk/runtime/__init__.py
index adf26c707..298bc5a78 100644
--- a/databricks/sdk/runtime/__init__.py
+++ b/databricks/sdk/runtime/__init__.py
@@ -3,7 +3,7 @@
import logging
from typing import Dict, Optional, Union, cast
-logger = logging.getLogger("databricks.sdk")
+_LOG = logging.getLogger(__name__)
is_local_implementation = True
# All objects that are injected into the Notebook's user namespace should also be made
@@ -28,7 +28,7 @@
# a workaround here for exposing required information in notebook environment
from dbruntime.sdk_credential_provider import init_runtime_native_auth
- logger.debug("runtime SDK credential provider available")
+ _LOG.debug("runtime SDK credential provider available")
dbruntime_objects.append("init_runtime_native_auth")
except ImportError:
init_runtime_native_auth = None
@@ -42,10 +42,10 @@ def init_runtime_repl_auth():
ctx = get_context()
if ctx is None:
- logger.debug("Empty REPL context returned, skipping runtime auth")
+ _LOG.debug("Empty REPL context returned, skipping runtime auth")
return None, None
if ctx.workspaceUrl is None:
- logger.debug("Workspace URL is not available, skipping runtime auth")
+ _LOG.debug("Workspace URL is not available, skipping runtime auth")
return None, None
host = f"https://{ctx.workspaceUrl}"
@@ -113,12 +113,12 @@ def inner() -> Dict[str, str]:
sqlContext: SQLContext = None # type: ignore
table = sqlContext.table
except Exception as e:
- logging.debug(f"Failed to initialize globals 'sqlContext' and 'table', continuing. Cause: {e}")
+ _LOG.debug(f"Failed to initialize globals 'sqlContext' and 'table', continuing. Cause: {e}")
try:
from pyspark.sql.functions import udf # type: ignore
except ImportError as e:
- logging.debug(f"Failed to initialise udf global: {e}")
+ _LOG.debug(f"Failed to initialise udf global: {e}")
try:
from databricks.connect import DatabricksSession # type: ignore
@@ -128,13 +128,13 @@ def inner() -> Dict[str, str]:
except Exception as e:
# We are ignoring all failures here because user might want to initialize
# spark session themselves and we don't want to interfere with that
- logging.debug(f"Failed to initialize globals 'spark' and 'sql', continuing. Cause: {e}")
+ _LOG.debug(f"Failed to initialize globals 'spark' and 'sql', continuing. Cause: {e}")
try:
# We expect this to fail locally since dbconnect does not support sparkcontext. This is just for typing
sc = spark.sparkContext # type: ignore
except Exception as e:
- logging.debug(f"Failed to initialize global 'sc', continuing. Cause: {e}")
+ _LOG.debug(f"Failed to initialize global 'sc', continuing. Cause: {e}")
def display(input=None, *args, **kwargs) -> None: # type: ignore
"""