Skip to content

Commit 78290bc

Browse files
Make fmt
1 parent 2ec34c8 commit 78290bc

File tree

2 files changed

+16
-14
lines changed

2 files changed

+16
-14
lines changed

databricks/sdk/_base_client.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@ def __init__(self,
5151
extra_error_customizers: List[_ErrorCustomizer] = None,
5252
debug_headers: bool = False,
5353
clock: Clock = None,
54-
streaming_buffer_size: int = 1024 * 1024): # 1MB
54+
streaming_buffer_size: int = 1024 * 1024): # 1MB
5555
"""
5656
:param debug_truncate_bytes:
5757
:param retry_timeout_seconds:

tests/test_base_client.py

Lines changed: 15 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,9 @@
11
from http.server import BaseHTTPRequestHandler
22
from typing import Iterator, List
3+
from unittest.mock import Mock
34

45
import pytest
56
import requests
6-
from unittest.mock import Mock
77

88
from databricks.sdk import errors, useragent
99
from databricks.sdk._base_client import _BaseClient, _StreamingResponse
@@ -279,34 +279,36 @@ def inner(h: BaseHTTPRequestHandler):
279279
assert len(requests) == 2
280280

281281

282-
@pytest.mark.parametrize('chunk_size,expected_chunks,data_size', [
283-
(5, 20, 100), # 100 / 5 bytes per chunk = 20 chunks
284-
(10, 10, 100), # 100 / 10 bytes per chunk = 10 chunks
285-
(200, 1, 100), # 100 / 200 bytes per chunk = 1 chunk
286-
])
282+
@pytest.mark.parametrize('chunk_size,expected_chunks,data_size',
283+
[(5, 20, 100), # 100 / 5 bytes per chunk = 20 chunks
284+
(10, 10, 100), # 100 / 10 bytes per chunk = 10 chunks
285+
(200, 1, 100), # 100 / 200 bytes per chunk = 1 chunk
286+
])
287287
def test_streaming_response_chunk_size(chunk_size, expected_chunks, data_size):
288288
test_data = b"0" * data_size
289289
content_chunks = []
290290

291291
mock_response = Mock(spec=requests.Response)
292+
292293
def mock_iter_content(chunk_size):
293294
# Simulate how requests would chunk the data.
294295
for i in range(0, len(test_data), chunk_size):
295296
chunk = test_data[i:i + chunk_size]
296-
content_chunks.append(chunk) # track chunks for verification
297+
content_chunks.append(chunk) # track chunks for verification
297298
yield chunk
299+
298300
mock_response.iter_content = mock_iter_content
299301
stream = _StreamingResponse(mock_response)
300302
stream.set_chunk_size(chunk_size)
301-
303+
302304
# Read all data one byte at a time.
303305
received_data = b""
304306
while True:
305-
chunk = stream.read(1)
307+
chunk = stream.read(1)
306308
if not chunk:
307309
break
308310
received_data += chunk
309-
310-
assert received_data == test_data # all data was received correctly
311-
assert len(content_chunks) == expected_chunks # correct number of chunks
312-
assert all(len(c) <= chunk_size for c in content_chunks) # chunks don't exceed size
311+
312+
assert received_data == test_data # all data was received correctly
313+
assert len(content_chunks) == expected_chunks # correct number of chunks
314+
assert all(len(c) <= chunk_size for c in content_chunks) # chunks don't exceed size

0 commit comments

Comments (0)