2 changes: 2 additions & 0 deletions NEXT_CHANGELOG.md
@@ -6,6 +6,8 @@

### Bug Fixes

* Log through per-module logger instances (`logging.getLogger(__name__)`) rather than Python's global root logger.

### Documentation

### Internal Changes
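The changelog entry above describes a standard-library pattern rather than anything SDK-specific. A minimal sketch of the before/after behavior, with illustrative messages (not taken from the diff):

```python
import logging

# Before: module-level logging calls go through the root logger, so their
# output cannot be scoped or filtered per module.
logging.debug("emitted via the root logger")

# After: a module-scoped logger. In a module such as databricks.sdk.errors.parser,
# __name__ makes this logger a child of "databricks.sdk", so any level or
# handler configuration on the package logger still applies to it.
_LOG = logging.getLogger(__name__)
_LOG.debug("emitted via the %r logger", __name__)
```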
9 changes: 5 additions & 4 deletions databricks/sdk/_widgets/__init__.py
@@ -3,6 +3,7 @@
import warnings
from abc import ABC, abstractmethod

_LOG = logging.getLogger(__name__)

class WidgetUtils(ABC):

@@ -54,7 +55,7 @@ def _remove_all(self):
)
== 0
):
logging.debug("Not in an interactive notebook. Skipping ipywidgets implementation for dbutils.")
_LOG.debug("Not in an interactive notebook. Skipping ipywidgets implementation for dbutils.")
raise EnvironmentError("Not in an interactive notebook.")

# For import errors in IPyWidgetUtil, we provide a warning message, prompting users to install the
@@ -63,7 +64,7 @@ def _remove_all(self):
from .ipywidgets_utils import IPyWidgetUtil

widget_impl = IPyWidgetUtil
logging.debug("Using ipywidgets implementation for dbutils.")
_LOG.debug("Using ipywidgets implementation for dbutils.")

except ImportError as e:
# Since we are certain that we are in an interactive notebook, we can make assumptions about
@@ -73,11 +74,11 @@ def _remove_all(self):
"\tpip install 'databricks-sdk[notebook]'\n"
"Falling back to default_value_only implementation for databricks widgets."
)
logging.debug(f"{e.msg}. Skipping ipywidgets implementation for dbutils.")
_LOG.debug(f"{e.msg}. Skipping ipywidgets implementation for dbutils.")
raise e

except:
from .default_widgets_utils import DefaultValueOnlyWidgetUtils

widget_impl = DefaultValueOnlyWidgetUtils
logging.debug("Using default_value_only implementation for dbutils.")
_LOG.debug("Using default_value_only implementation for dbutils.")
4 changes: 3 additions & 1 deletion databricks/sdk/credentials_provider.py
@@ -22,6 +22,8 @@

from . import azure, oauth, oidc, oidc_token_supplier

_LOG = logging.getLogger(__name__)

CredentialsProvider = Callable[[], Dict[str, str]]

logger = logging.getLogger("databricks.sdk")
@@ -598,7 +600,7 @@ def _run_subprocess(
kwargs["shell"] = sys.platform.startswith("win")
# windows requires shell=True to be able to execute 'az login' or other commands
# cannot use shell=True all the time, as it breaks macOS
logging.debug(f"Running command: {' '.join(popenargs)}")
_LOG.debug(f"Running command: {' '.join(popenargs)}")
return subprocess.run(
popenargs,
input=input,
5 changes: 3 additions & 2 deletions databricks/sdk/errors/customizer.py
@@ -3,6 +3,7 @@

import requests

_LOG = logging.getLogger(__name__)

class _ErrorCustomizer(abc.ABC):
"""A customizer for errors from the Databricks REST API."""
@@ -23,7 +24,7 @@ class _RetryAfterCustomizer(_ErrorCustomizer):
def _parse_retry_after(cls, response: requests.Response) -> int:
retry_after = response.headers.get("Retry-After")
if retry_after is None:
logging.debug(
_LOG.debug(
f"No Retry-After header received in response with status code 429 or 503. Defaulting to {cls._DEFAULT_RETRY_AFTER_SECONDS}"
)
# 429 requests should include a `Retry-After` header, but if it's missing,
@@ -39,7 +40,7 @@ def _parse_retry_after(cls, response: requests.Response) -> int:
try:
return int(retry_after)
except ValueError:
logging.debug(
_LOG.debug(
f"Invalid Retry-After header received: {retry_after}. Defaulting to {cls._DEFAULT_RETRY_AFTER_SECONDS}"
)
# defaulting to 1 sleep second to make self._is_retryable() simpler
12 changes: 7 additions & 5 deletions databricks/sdk/errors/deserializer.py
@@ -6,6 +6,8 @@

import requests

_LOG = logging.getLogger(__name__)


class _ErrorDeserializer(abc.ABC):
"""A parser for errors from the Databricks REST API."""
@@ -34,19 +36,19 @@ def deserialize_error(self, response: requests.Response, response_body: bytes) -
payload_str = response_body.decode("utf-8")
resp = json.loads(payload_str)
except UnicodeDecodeError as e:
logging.debug(
_LOG.debug(
"_StandardErrorParser: unable to decode response using utf-8",
exc_info=e,
)
return None
except json.JSONDecodeError as e:
logging.debug(
_LOG.debug(
"_StandardErrorParser: unable to deserialize response as json",
exc_info=e,
)
return None
if not isinstance(resp, dict):
logging.debug("_StandardErrorParser: response is valid JSON but not a dictionary")
_LOG.debug("_StandardErrorParser: response is valid JSON but not a dictionary")
return None

error_args = {
@@ -84,7 +86,7 @@ def deserialize_error(self, response: requests.Response, response_body: bytes) -
payload_str = response_body.decode("utf-8")
match = self.__STRING_ERROR_REGEX.match(payload_str)
if not match:
logging.debug("_StringErrorParser: unable to parse response as string")
_LOG.debug("_StringErrorParser: unable to parse response as string")
return None
error_code, message = match.groups()
return {
@@ -115,5 +117,5 @@ def deserialize_error(self, response: requests.Response, response_body: bytes) -
"message": message,
"error_code": response.reason.upper().replace(" ", "_"),
}
logging.debug("_HtmlErrorParser: no <pre> tag found in error response")
_LOG.debug("_HtmlErrorParser: no <pre> tag found in error response")
return None
4 changes: 3 additions & 1 deletion databricks/sdk/errors/parser.py
@@ -13,6 +13,8 @@
from .private_link import (_get_private_link_validation_error,
_is_private_link_redirect)

_LOG = logging.getLogger(__name__)

# A list of _ErrorDeserializers that are tried in order to parse an API error from a response body. Most errors should
# be parsable by the _StandardErrorDeserializer, but additional parsers can be added here for specific error formats.
# The order of the parsers is not important, as the set of errors that can be parsed by each parser should be disjoint.
@@ -78,7 +80,7 @@ def get_api_error(self, response: requests.Response) -> Optional[DatabricksError
customizer.customize_error(response, error_args)
return _error_mapper(response, error_args)
except Exception as e:
logging.debug(
_LOG.debug(
f"Error parsing response with {parser}, continuing",
exc_info=e,
)
16 changes: 8 additions & 8 deletions databricks/sdk/runtime/__init__.py
@@ -3,7 +3,7 @@
import logging
from typing import Dict, Optional, Union, cast

logger = logging.getLogger("databricks.sdk")
Contributor Author: This existed before but wasn't consistently used. I've renamed it to `_LOG` because it is not meant to be exposed publicly, and the name is consistent with the rest of the SDK. (A short usage sketch follows at the end of this diff.)

_LOG = logging.getLogger(__name__)
is_local_implementation = True

# All objects that are injected into the Notebook's user namespace should also be made
@@ -28,7 +28,7 @@
# a workaround here for exposing required information in notebook environment
from dbruntime.sdk_credential_provider import init_runtime_native_auth

logger.debug("runtime SDK credential provider available")
_LOG.debug("runtime SDK credential provider available")
dbruntime_objects.append("init_runtime_native_auth")
except ImportError:
init_runtime_native_auth = None
@@ -42,10 +42,10 @@ def init_runtime_repl_auth():

ctx = get_context()
if ctx is None:
logger.debug("Empty REPL context returned, skipping runtime auth")
_LOG.debug("Empty REPL context returned, skipping runtime auth")
return None, None
if ctx.workspaceUrl is None:
logger.debug("Workspace URL is not available, skipping runtime auth")
_LOG.debug("Workspace URL is not available, skipping runtime auth")
return None, None
host = f"https://{ctx.workspaceUrl}"

@@ -113,12 +113,12 @@ def inner() -> Dict[str, str]:
sqlContext: SQLContext = None # type: ignore
table = sqlContext.table
except Exception as e:
logging.debug(f"Failed to initialize globals 'sqlContext' and 'table', continuing. Cause: {e}")
_LOG.debug(f"Failed to initialize globals 'sqlContext' and 'table', continuing. Cause: {e}")

try:
from pyspark.sql.functions import udf # type: ignore
except ImportError as e:
logging.debug(f"Failed to initialise udf global: {e}")
_LOG.debug(f"Failed to initialise udf global: {e}")

try:
from databricks.connect import DatabricksSession # type: ignore
@@ -128,13 +128,13 @@ def inner() -> Dict[str, str]:
except Exception as e:
# We are ignoring all failures here because user might want to initialize
# spark session themselves and we don't want to interfere with that
logging.debug(f"Failed to initialize globals 'spark' and 'sql', continuing. Cause: {e}")
_LOG.debug(f"Failed to initialize globals 'spark' and 'sql', continuing. Cause: {e}")

try:
# We expect this to fail locally since dbconnect does not support sparkcontext. This is just for typing
sc = spark.sparkContext # type: ignore
except Exception as e:
logging.debug(f"Failed to initialize global 'sc', continuing. Cause: {e}")
_LOG.debug(f"Failed to initialize global 'sc', continuing. Cause: {e}")

def display(input=None, *args, **kwargs) -> None: # type: ignore
"""
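As a usage sketch for the author comment above (assuming an application that already configures SDK logging): because every module now calls `logging.getLogger(__name__)` from a module under `databricks/sdk/`, the resulting loggers are descendants of `databricks.sdk`, so existing configuration keeps working:

```python
import logging

# Attach a handler to the root logger, then raise the level for the SDK only.
# Debug records from module loggers such as "databricks.sdk.errors.parser"
# propagate up through "databricks.sdk" and on to the root handler.
logging.basicConfig(level=logging.INFO)
logging.getLogger("databricks.sdk").setLevel(logging.DEBUG)
```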