
Commit 31ff8fc

Fix test and formatting
1 parent a4c228d · commit 31ff8fc

6 files changed: +181, -205 lines

databricks/sdk/__init__.py

Lines changed: 3 additions & 2 deletions
Some generated files are not rendered by default.

databricks/sdk/_base_client.py

Lines changed: 4 additions & 1 deletion
@@ -1,7 +1,7 @@
 import io
 import logging
-from abc import ABC, abstractmethod
 import urllib.parse
+from abc import ABC, abstractmethod
 from datetime import timedelta
 from types import TracebackType
 from typing import (Any, BinaryIO, Callable, Dict, Iterable, Iterator, List,
@@ -285,7 +285,9 @@ def _record_request_log(self, response: requests.Response, raw: bool = False) ->
             return
         logger.debug(RoundTrip(response, self._debug_headers, self._debug_truncate_bytes, raw).generate())
 
+
 class _RawResponse(ABC):
+
     @abstractmethod
     # follows Response signature: https://github.com/psf/requests/blob/main/src/requests/models.py#L799
     def iter_content(self, chunk_size: int = 1, decode_unicode: bool = False):
@@ -295,6 +297,7 @@ def iter_content(self, chunk_size: int = 1, decode_unicode: bool = False):
     def close(self):
         pass
 
+
 class _StreamingResponse(BinaryIO):
     _response: _RawResponse
     _buffer: bytes
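These hunks only add blank lines around _RawResponse and _StreamingResponse, but they show the small contract being formatted: _RawResponse is an ABC with iter_content(chunk_size, decode_unicode), mirroring requests.Response.iter_content, plus close(), and _StreamingResponse wraps such an object. The sketch below is illustrative only and not part of the commit; it assumes this commit's databricks.sdk._base_client is importable, and _BytesRawResponse is a hypothetical name.

# Illustrative sketch, not part of the commit: a minimal in-memory implementation
# of the _RawResponse contract (iter_content + close) shown in the diff above.
# Assumes this commit's databricks.sdk._base_client is importable;
# _BytesRawResponse is a hypothetical name.
from databricks.sdk._base_client import _RawResponse


class _BytesRawResponse(_RawResponse):

    def __init__(self, payload: bytes):
        self._payload = payload

    def iter_content(self, chunk_size: int = 1, decode_unicode: bool = False):
        # Yield the payload in chunk_size pieces, mirroring requests.Response.iter_content.
        for i in range(0, len(self._payload), chunk_size):
            yield self._payload[i:i + chunk_size]

    def close(self):
        pass


for chunk in _BytesRawResponse(b"hello world").iter_content(chunk_size=4):
    print(chunk)  # b'hell', b'o wo', b'rld'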

databricks/sdk/config.py

Lines changed: 2 additions & 1 deletion
@@ -92,7 +92,8 @@ class Config:
     max_connections_per_pool: int = ConfigAttribute()
     databricks_environment: Optional[DatabricksEnvironment] = None
 
-    enable_experimental_files_api_client: bool = ConfigAttribute(env='DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT')
+    enable_experimental_files_api_client: bool = ConfigAttribute(
+        env='DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT')
     files_api_client_download_max_total_recovers = None
     files_api_client_download_max_total_recovers_without_progressing = 1
 
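The reformatted attribute is the switch for the experimental Files API client, read from the DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT environment variable; the two attributes below it bound how often a broken download may be recovered. A minimal sketch of flipping that switch follows; the host and token values are placeholders, and the commented values are simply the defaults visible in this diff.

# Illustrative sketch only; host/token are placeholders, not working credentials.
import os

from databricks.sdk.config import Config

os.environ['DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT'] = 'true'

cfg = Config(host='https://example.cloud.databricks.com', token='dapi-placeholder')
print(bool(cfg.enable_experimental_files_api_client))  # True once the env var is set
print(cfg.files_api_client_download_max_total_recovers)  # None by default (see _should_recover below)
print(cfg.files_api_client_download_max_total_recovers_without_progressing)  # 1 by default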

databricks/sdk/mixins/files.py

Lines changed: 20 additions & 12 deletions
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import base64
+import logging
 import os
 import pathlib
 import platform
@@ -11,23 +12,25 @@
 from collections.abc import Iterator
 from io import BytesIO
 from types import TracebackType
-from typing import (TYPE_CHECKING, AnyStr, BinaryIO, Generator, Iterable, Optional, Type, Union)
+from typing import (TYPE_CHECKING, AnyStr, BinaryIO, Generator, Iterable,
                    Optional, Type, Union)
 from urllib import parse
+
 from requests import RequestException
 
-import logging
+from .._base_client import _RawResponse, _StreamingResponse
 from .._property import _cached_property
 from ..errors import NotFound
 from ..service import files
 from ..service._internal import _escape_multi_segment_path_parameter
 from ..service.files import DownloadResponse
-from .._base_client import _RawResponse, _StreamingResponse
 
 if TYPE_CHECKING:
     from _typeshed import Self
 
 _LOG = logging.getLogger(__name__)
 
+
 class _DbfsIO(BinaryIO):
     MAX_CHUNK_SIZE = 1024 * 1024
 
@@ -699,13 +702,17 @@ def _download_raw_stream(self,
 
         result = DownloadResponse.from_dict(res)
         if not isinstance(result.contents, _StreamingResponse):
-            raise Exception("Internal error: response contents is of unexpected type: " + type(result.contents).__name__)
+            raise Exception("Internal error: response contents is of unexpected type: " +
                            type(result.contents).__name__)
 
         return result
 
     def _wrap_stream(self, file_path: str, downloadResponse: DownloadResponse):
         underlying_response = _ResilientIterator._extract_raw_response(downloadResponse)
-        return _ResilientResponse(self, file_path, downloadResponse.last_modified, offset=0,
+        return _ResilientResponse(self,
+                                  file_path,
+                                  downloadResponse.last_modified,
+                                  offset=0,
                                   underlying_response=underlying_response)
 
 
@@ -728,8 +735,8 @@ def iter_content(self, chunk_size=1, decode_unicode=False):
             raise ValueError('Decode unicode is not supported')
 
         iterator = self.underlying_response.iter_content(chunk_size=chunk_size, decode_unicode=False)
-        self.iterator = _ResilientIterator(iterator, self.file_path, self.file_last_modified,
-                                           self.offset, self.api, chunk_size)
+        self.iterator = _ResilientIterator(iterator, self.file_path, self.file_last_modified, self.offset,
+                                           self.api, chunk_size)
         return self.iterator
 
     def close(self):
@@ -761,7 +768,6 @@ def __init__(self, underlying_iterator, file_path: str, file_last_modified: str,
         self._recovers_without_progressing_count: int = 0
         self._closed: bool = False
 
-
     def _should_recover(self) -> bool:
         if self._total_recovers_count == self._api._config.files_api_client_download_max_total_recovers:
             _LOG.debug("Total recovers limit exceeded")
@@ -773,7 +779,7 @@ def _should_recover(self) -> bool:
 
     def _recover(self) -> bool:
         if not self._should_recover():
-            return False # recover suppressed, rethrow original exception
+            return False  # recover suppressed, rethrow original exception
 
         self._total_recovers_count += 1
         self._recovers_without_progressing_count += 1
@@ -784,13 +790,15 @@ def _recover(self) -> bool:
            _LOG.debug("Trying to recover from offset " + str(self._offset))
 
             # following call includes all the required network retries
-            downloadResponse = self._api._download_raw_stream(self._file_path, self._offset, self._file_last_modified)
+            downloadResponse = self._api._download_raw_stream(self._file_path, self._offset,
                                                              self._file_last_modified)
             underlying_response = _ResilientIterator._extract_raw_response(downloadResponse)
-            self._underlying_iterator = underlying_response.iter_content(chunk_size=self._chunk_size, decode_unicode=False)
+            self._underlying_iterator = underlying_response.iter_content(chunk_size=self._chunk_size,
                                                                          decode_unicode=False)
             _LOG.debug("Recover succeeded")
             return True
         except:
-            return False # recover failed, rethrow original exception
+            return False  # recover failed, rethrow original exception
 
     def __next__(self):
         if self._closed:
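Most of this file's changes are line wrapping around the resilient-download path: _download_raw_stream re-requests the file from a given offset, and _ResilientIterator transparently recovers a broken stream as long as the Config limits above allow it. The following self-contained sketch illustrates that recovery pattern in isolation; it is not the SDK code, and reopen_from is a hypothetical callback standing in for _download_raw_stream (the real _ResilientIterator also tracks the file's last_modified timestamp and the files_api_client_download_* limits).

# Illustrative sketch of the recovery pattern, not the SDK implementation.
# reopen_from(offset) is a hypothetical callback that restarts the byte stream
# from the given offset.
from typing import Callable, Iterator


class ResilientChunks:

    def __init__(self, reopen_from: Callable[[int], Iterator[bytes]], max_recovers: int = 3):
        self._reopen_from = reopen_from
        self._offset = 0
        self._recovers = 0
        self._max_recovers = max_recovers
        self._chunks = reopen_from(0)

    def __iter__(self) -> "ResilientChunks":
        return self

    def __next__(self) -> bytes:
        while True:
            try:
                chunk = next(self._chunks)
                self._offset += len(chunk)  # progress moves the recovery point forward
                return chunk
            except StopIteration:
                raise  # normal end of stream
            except Exception:
                if self._recovers >= self._max_recovers:
                    raise  # recover suppressed, rethrow original exception
                self._recovers += 1
                # re-open the stream from the last good offset, as _recover() does
                self._chunks = self._reopen_from(self._offset)

In the SDK, the callback role is played by _download_raw_stream(file_path, offset, last_modified), which is why _recover() can resume a download from the last good offset instead of restarting it from byte zero.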

tests/test_base_client.py

Lines changed: 5 additions & 4 deletions
@@ -8,14 +8,15 @@
 import requests
 
 from databricks.sdk import errors, useragent
-from databricks.sdk._base_client import _BaseClient, _StreamingResponse
+from databricks.sdk._base_client import (_BaseClient, _RawResponse,
                                         _StreamingResponse)
 from databricks.sdk.core import DatabricksError
 
 from .clock import FakeClock
 from .fixture_server import http_fixture_server
 
 
-class DummyResponse(requests.Response):
+class DummyResponse(_RawResponse):
     _content: Iterator[bytes]
     _closed: bool = False
 
@@ -293,9 +294,9 @@ def test_streaming_response_chunk_size(chunk_size, expected_chunks, data_size):
     test_data = bytes(rng.getrandbits(8) for _ in range(data_size))
 
     content_chunks = []
-    mock_response = Mock(spec=requests.Response)
+    mock_response = Mock(spec=_RawResponse)
 
-    def mock_iter_content(chunk_size):
+    def mock_iter_content(chunk_size: int, decode_unicode: bool):
         # Simulate how requests would chunk the data.
         for i in range(0, len(test_data), chunk_size):
             chunk = test_data[i:i + chunk_size]
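The test now mocks the SDK's own _RawResponse abstraction instead of requests.Response, and the fake iter_content accepts the decode_unicode parameter declared on that interface, presumably because the code under test calls it with both arguments. A hedged sketch of the same wiring outside the test suite follows; the data and chunk size are arbitrary, and it assumes this commit's databricks.sdk._base_client is importable.

# Illustrative sketch, not the repository's test: wiring a Mock to the
# _RawResponse interface the way the updated test does.
from unittest.mock import Mock

from databricks.sdk._base_client import _RawResponse

test_data = b"0123456789"


def fake_iter_content(chunk_size: int, decode_unicode: bool):
    # Mirror the requests-style chunking the real test simulates.
    for i in range(0, len(test_data), chunk_size):
        yield test_data[i:i + chunk_size]


mock_response = Mock(spec=_RawResponse)
mock_response.iter_content.side_effect = fake_iter_content

chunks = list(mock_response.iter_content(chunk_size=4, decode_unicode=False))
assert chunks == [b"0123", b"4567", b"89"]

Note that Mock(spec=...) only restricts which attributes exist on the mock; the fake still has to accept whatever arguments the caller passes, which is why the updated mock_iter_content signature adds decode_unicode explicitly.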
