Skip to content

Commit e82e5f8

Browse files
committed
Merge remote-tracking branch 'upstream' into implement-incremental-uploads
2 parents f1f3e26 + 8740bf8 commit e82e5f8

File tree

6 files changed

+50
-87
lines changed

6 files changed

+50
-87
lines changed

.github/workflows/downstreams.yml

Lines changed: 0 additions & 54 deletions
This file was deleted.

databricks/sdk/errors/base.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@
77

88

99
class ErrorDetail:
10-
1110
def __init__(
1211
self,
1312
type: str = None,

tests/integration/conftest.py

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77

88
import pytest
99

10-
from databricks.sdk import AccountClient, WorkspaceClient
10+
from databricks.sdk import AccountClient, FilesAPI, FilesExt, WorkspaceClient
1111
from databricks.sdk.service.catalog import VolumeType
1212

1313

@@ -125,6 +125,18 @@ def volume(ucws, schema):
125125
ucws.volumes.delete(volume.full_name)
126126

127127

128+
@pytest.fixture(scope="session", params=[False, True])
129+
def files_api(request, ucws) -> FilesAPI:
130+
if request.param:
131+
# ensure new Files API client is used for files of any size
132+
ucws.config.multipart_upload_min_stream_size = 0
133+
# enable new Files API client
134+
return FilesExt(ucws.api_client, ucws.config)
135+
else:
136+
# use the default client
137+
return ucws.files
138+
139+
128140
@pytest.fixture()
129141
def workspace_dir(w, random):
130142
directory = f"/Users/{w.current_user.me().user_name}/dir-{random(12)}"

tests/integration/test_auth.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -108,12 +108,12 @@ def _get_lts_versions(w) -> typing.List[SparkVersion]:
108108
return lts_runtimes
109109

110110

111-
def test_runtime_auth_from_jobs_volumes(ucws, fresh_wheel_file, env_or_skip, random, volume):
111+
def test_runtime_auth_from_jobs_volumes(ucws, files_api, fresh_wheel_file, env_or_skip, random, volume):
112112
dbr_versions = [v for v in _get_lts_versions(ucws) if int(v.key.split(".")[0]) >= 15]
113113

114114
volume_wheel = f"{volume}/tmp/wheels/{random(10)}/{fresh_wheel_file.name}"
115115
with fresh_wheel_file.open("rb") as f:
116-
ucws.files.upload(volume_wheel, f)
116+
files_api.upload(volume_wheel, f)
117117

118118
lib = Library(whl=volume_wheel)
119119
return _test_runtime_auth_from_jobs_inner(ucws, env_or_skip, random, dbr_versions, lib)

tests/integration/test_files.py

Lines changed: 28 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -218,30 +218,30 @@ def create_volume(w, catalog, schema, volume):
218218
return ResourceWithCleanup(lambda: w.volumes.delete(res.full_name))
219219

220220

221-
def test_files_api_upload_download(ucws, random):
221+
def test_files_api_upload_download(ucws, files_api, random):
222222
w = ucws
223223
schema = "filesit-" + random()
224224
volume = "filesit-" + random()
225225
with ResourceWithCleanup.create_schema(w, "main", schema):
226226
with ResourceWithCleanup.create_volume(w, "main", schema, volume):
227227
f = io.BytesIO(b"some text data")
228228
target_file = f"/Volumes/main/{schema}/{volume}/filesit-with-?-and-#-{random()}.txt"
229-
w.files.upload(target_file, f)
230-
with w.files.download(target_file).contents as f:
229+
files_api.upload(target_file, f)
230+
with files_api.download(target_file).contents as f:
231231
assert f.read() == b"some text data"
232232

233233

234-
def test_files_api_read_twice_from_one_download(ucws, random):
234+
def test_files_api_read_twice_from_one_download(ucws, files_api, random):
235235
w = ucws
236236
schema = "filesit-" + random()
237237
volume = "filesit-" + random()
238238
with ResourceWithCleanup.create_schema(w, "main", schema):
239239
with ResourceWithCleanup.create_volume(w, "main", schema, volume):
240240
f = io.BytesIO(b"some text data")
241241
target_file = f"/Volumes/main/{schema}/{volume}/filesit-{random()}.txt"
242-
w.files.upload(target_file, f)
242+
files_api.upload(target_file, f)
243243

244-
res = w.files.download(target_file).contents
244+
res = files_api.download(target_file).contents
245245

246246
with res:
247247
assert res.read() == b"some text data"
@@ -251,82 +251,82 @@ def test_files_api_read_twice_from_one_download(ucws, random):
251251
res.read()
252252

253253

254-
def test_files_api_delete_file(ucws, random):
254+
def test_files_api_delete_file(ucws, files_api, random):
255255
w = ucws
256256
schema = "filesit-" + random()
257257
volume = "filesit-" + random()
258258
with ResourceWithCleanup.create_schema(w, "main", schema):
259259
with ResourceWithCleanup.create_volume(w, "main", schema, volume):
260260
f = io.BytesIO(b"some text data")
261261
target_file = f"/Volumes/main/{schema}/{volume}/filesit-{random()}.txt"
262-
w.files.upload(target_file, f)
263-
w.files.delete(target_file)
262+
files_api.upload(target_file, f)
263+
files_api.delete(target_file)
264264

265265

266-
def test_files_api_get_metadata(ucws, random):
266+
def test_files_api_get_metadata(ucws, files_api, random):
267267
w = ucws
268268
schema = "filesit-" + random()
269269
volume = "filesit-" + random()
270270
with ResourceWithCleanup.create_schema(w, "main", schema):
271271
with ResourceWithCleanup.create_volume(w, "main", schema, volume):
272272
f = io.BytesIO(b"some text data")
273273
target_file = f"/Volumes/main/{schema}/{volume}/filesit-{random()}.txt"
274-
w.files.upload(target_file, f)
275-
m = w.files.get_metadata(target_file)
274+
files_api.upload(target_file, f)
275+
m = files_api.get_metadata(target_file)
276276
assert m.content_type == "application/octet-stream"
277277
assert m.content_length == 14
278278
assert m.last_modified is not None
279279

280280

281-
def test_files_api_create_directory(ucws, random):
281+
def test_files_api_create_directory(ucws, files_api, random):
282282
w = ucws
283283
schema = "filesit-" + random()
284284
volume = "filesit-" + random()
285285
with ResourceWithCleanup.create_schema(w, "main", schema):
286286
with ResourceWithCleanup.create_volume(w, "main", schema, volume):
287287
target_directory = f"/Volumes/main/{schema}/{volume}/filesit-{random()}/"
288-
w.files.create_directory(target_directory)
288+
files_api.create_directory(target_directory)
289289

290290

291-
def test_files_api_list_directory_contents(ucws, random):
291+
def test_files_api_list_directory_contents(ucws, files_api, random):
292292
w = ucws
293293
schema = "filesit-" + random()
294294
volume = "filesit-" + random()
295295
with ResourceWithCleanup.create_schema(w, "main", schema):
296296
with ResourceWithCleanup.create_volume(w, "main", schema, volume):
297297
target_directory = f"/Volumes/main/{schema}/{volume}/filesit-{random()}"
298-
w.files.upload(target_directory + "/file1.txt", io.BytesIO(b"some text data"))
299-
w.files.upload(target_directory + "/file2.txt", io.BytesIO(b"some text data"))
300-
w.files.upload(target_directory + "/file3.txt", io.BytesIO(b"some text data"))
298+
files_api.upload(target_directory + "/file1.txt", io.BytesIO(b"some text data"))
299+
files_api.upload(target_directory + "/file2.txt", io.BytesIO(b"some text data"))
300+
files_api.upload(target_directory + "/file3.txt", io.BytesIO(b"some text data"))
301301

302-
result = list(w.files.list_directory_contents(target_directory))
302+
result = list(files_api.list_directory_contents(target_directory))
303303
assert len(result) == 3
304304

305305

306-
def test_files_api_delete_directory(ucws, random):
306+
def test_files_api_delete_directory(ucws, files_api, random):
307307
w = ucws
308308
schema = "filesit-" + random()
309309
volume = "filesit-" + random()
310310
with ResourceWithCleanup.create_schema(w, "main", schema):
311311
with ResourceWithCleanup.create_volume(w, "main", schema, volume):
312312
target_directory = f"/Volumes/main/{schema}/{volume}/filesit-{random()}/"
313-
w.files.create_directory(target_directory)
314-
w.files.delete_directory(target_directory)
313+
files_api.create_directory(target_directory)
314+
files_api.delete_directory(target_directory)
315315

316316

317-
def test_files_api_get_directory_metadata(ucws, random):
317+
def test_files_api_get_directory_metadata(ucws, files_api, random):
318318
w = ucws
319319
schema = "filesit-" + random()
320320
volume = "filesit-" + random()
321321
with ResourceWithCleanup.create_schema(w, "main", schema):
322322
with ResourceWithCleanup.create_volume(w, "main", schema, volume):
323323
target_directory = f"/Volumes/main/{schema}/{volume}/filesit-{random()}/"
324-
w.files.create_directory(target_directory)
325-
w.files.get_directory_metadata(target_directory)
324+
files_api.create_directory(target_directory)
325+
files_api.get_directory_metadata(target_directory)
326326

327327

328328
@pytest.mark.benchmark
329-
def test_files_api_download_benchmark(ucws, random):
329+
def test_files_api_download_benchmark(ucws, files_api, random):
330330
w = ucws
331331
schema = "filesit-" + random()
332332
volume = "filesit-" + random()
@@ -335,7 +335,7 @@ def test_files_api_download_benchmark(ucws, random):
335335
# Create a 50 MB file
336336
f = io.BytesIO(bytes(range(256)) * 200000)
337337
target_file = f"/Volumes/main/{schema}/{volume}/filesit-benchmark-{random()}.txt"
338-
w.files.upload(target_file, f)
338+
files_api.upload(target_file, f)
339339

340340
totals = {}
341341
for chunk_size_kb in [
@@ -357,7 +357,7 @@ def test_files_api_download_benchmark(ucws, random):
357357
count = 10
358358
for i in range(count):
359359
start = time.time()
360-
f = w.files.download(target_file).contents
360+
f = files_api.download(target_file).contents
361361
f.set_chunk_size(chunk_size)
362362
with f as vf:
363363
vf.read()

tests/test_model_serving_auth.py

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -162,6 +162,12 @@ def test_model_serving_auth_refresh(monkeypatch, mocker):
162162

163163

164164
def test_agent_user_credentials(monkeypatch, mocker):
165+
# Guarantee that the test defaults to environment variables rather than the config file.
166+
#
167+
# TODO: this is hacky and we should find a better way to tell the config
168+
# that it should not read from the config file.
169+
monkeypatch.setenv("DATABRICKS_CONFIG_FILE", "x")
170+
165171
monkeypatch.setenv("IS_IN_DB_MODEL_SERVING_ENV", "true")
166172
monkeypatch.setenv("DB_MODEL_SERVING_HOST_URL", "x")
167173
monkeypatch.setattr(
@@ -205,4 +211,4 @@ def test_agent_user_credentials_in_non_model_serving_environments(monkeypatch):
205211
headers = cfg.authenticate()
206212

207213
assert cfg.host == "https://x"
208-
assert headers.get("Authorization") == f"Bearer token"
214+
assert headers.get("Authorization") == "Bearer token"

0 commit comments

Comments
 (0)