Commit 1f2cce6

make fmt

1 parent 7224d7d · commit 1f2cce6

21 files changed: +59 −39 lines changed
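The commit message is `make fmt`, and the diffs below are almost entirely mechanical: imports re-sorted alphabetically within their sections, duplicate imports merged, and blank lines normalized. This is the kind of change an isort-style formatter produces. As a minimal sketch (the repository's actual `make fmt` recipe and formatter configuration are not shown on this page, so isort is an assumption), the reordering in databricks/sdk/__init__.py can be reproduced with isort's Python API:

# A minimal sketch of the import re-ordering this commit applies.
# Assumes the `isort` package; the repo's real `make fmt` target is
# not visible in this diff, so the exact toolchain is an assumption.
import isort

# The import block from databricks/sdk/__init__.py before this commit.
before = (
    "from databricks.sdk.databricks.data_plane import DataPlaneTokenSource\n"
    "from databricks.sdk.files.v2.mixin import DbfsExt, FilesExt\n"
    "from databricks.sdk.files.v2.files import FilesAPI\n"
)

# isort.code() returns the source with imports sorted alphabetically
# within each section, so files.v2.files now precedes files.v2.mixin.
print(isort.code(before))

The same pass would account for the other hunks below: the merged GroupsClient/UsersClient import in test_iam.py, the blank lines inserted after function-level imports, and the whitespace-only line changes.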

databricks/sdk/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -9,8 +9,8 @@
 from databricks.sdk.databricks import azure
 from databricks.sdk.databricks.credentials_provider import CredentialsStrategy
 from databricks.sdk.databricks.data_plane import DataPlaneTokenSource
-from databricks.sdk.files.v2.mixin import DbfsExt, FilesExt
 from databricks.sdk.files.v2.files import FilesAPI
+from databricks.sdk.files.v2.mixin import DbfsExt, FilesExt
 
 _LOG = logging.getLogger(__name__)
 
databricks/sdk/files/v2/client.py

Lines changed: 0 additions & 1 deletion
Some generated files are not rendered by default.

databricks/sdk/files/v2/mixin.py

Lines changed: 1 addition & 1 deletion
@@ -721,7 +721,7 @@ def download(self, file_path: str) -> DownloadResponse:
         if not self._config.enable_experimental_files_api_client:
             # Use the new Files API client for downloads
             return super().download(file_path)
-
+
         initial_response: DownloadResponse = self._open_download_stream(
             file_path=file_path,
             start_byte_offset=0,

tests/integration/test_clusters.py

Lines changed: 5 additions & 5 deletions
@@ -1,11 +1,10 @@
-import logging
-from datetime import timedelta
 
 import pytest
 
-from databricks.sdk.databricks.core import DatabricksError
-from databricks.sdk.compute.v2.client import ClustersClient
 from databricks.sdk.compute.v2 import compute
+from databricks.sdk.compute.v2.client import ClustersClient
+from databricks.sdk.databricks.core import DatabricksError
+
 # from databricks.sdk.service.compute import EventType
 
 

@@ -40,7 +39,7 @@ def test_ensure_cluster_is_running(w, env_or_skip):
 # def test_create_cluster(w, env_or_skip, random):
 # from databricks.sdk.compute.v2.client import ClustersClient
 # cc = ClustersClient(config=w)
-
+
 # info = cc.create(
 # cluster_name=f"databricks-sdk-py-{random(8)}",
 # spark_version=cc.select_spark_version(long_term_support=True),

@@ -53,6 +52,7 @@ def test_ensure_cluster_is_running(w, env_or_skip):
 
 def test_error_unmarshall(w, random):
     from databricks.sdk.compute.v2.client import ClustersClient
+
     cc = ClustersClient(config=w)
     with pytest.raises(DatabricksError) as exc_info:
         cc.get("123__non_existing__")

tests/integration/test_commands.py

Lines changed: 1 addition & 0 deletions
@@ -5,6 +5,7 @@
 
 def test_error_unmarshall(w, random):
     from databricks.sdk.compute.v2.client import CommandExecutionClient
+
     cec = CommandExecutionClient(config=w)
     with pytest.raises(DatabricksError) as exc_info:
         cec.execute(cluster_id="__non_existing__")

tests/integration/test_data_plane.py

Lines changed: 0 additions & 1 deletion
@@ -1,4 +1,3 @@
-from databricks.sdk.databricks.data_plane import DataPlaneTokenSource
 
 # TODO: Re-enable this after adding data plane services to the SDK
 # def test_data_plane_token_source(ucws, env_or_skip):

tests/integration/test_deployment.py

Lines changed: 1 addition & 0 deletions
@@ -5,6 +5,7 @@
 
 def test_workspaces(a):
     from databricks.sdk.provisioning.v2.client import WorkspacesClient
+
     wc = WorkspacesClient(config=a)
     if a.is_azure:
         pytest.skip("not available on Azure")

tests/integration/test_external_browser.py

Lines changed: 3 additions & 0 deletions
@@ -12,6 +12,7 @@ def env(env_or_skip):
 
 def test_pkce_app(env):
     from databricks.sdk.compute.v2.client import ClustersClient
+
     cc = ClustersClient(
         host=env("DATABRICKS_HOST"),
         client_id=env("TEST_PKCE_APP_CLIENT_ID"),

@@ -24,6 +25,7 @@ def test_pkce_app(env):
 
 def test_public_app(env):
     from databricks.sdk.compute.v2.client import ClustersClient
+
     cc = ClustersClient(
         host=env("DATABRICKS_HOST"),
         client_id=env("TEST_PUBLIC_APP_CLIENT_ID"),

@@ -36,6 +38,7 @@ def test_public_app(env):
 
 def test_private_app(env):
     from databricks.sdk.compute.v2.client import ClustersClient
+
     cc = ClustersClient(
         host=env("DATABRICKS_HOST"),
         client_id=env("TEST_PRIVATE_APP_CLIENT_ID"),

tests/integration/test_files.py

Lines changed: 25 additions & 11 deletions
@@ -41,7 +41,9 @@ def test_dbfs_io(w, random):
 @pytest.fixture
 def junk(w, random):
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
+
     def inner(path: str, size=256) -> bytes:
         to_write = random(size).encode()
         with dc.open(path, write=True) as f:

@@ -55,7 +57,9 @@ def inner(path: str, size=256) -> bytes:
 @pytest.fixture
 def ls(w):
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
+
     def inner(root: str, recursive=False) -> List[str]:
         return [f.path.removeprefix(root) for f in dc.list(root, recursive=recursive)]
 

@@ -72,6 +76,7 @@ def test_recursive_listing(w, random, junk, ls):
     assert ["/01", "/a/02", "/a/b/03"] == ls(root, recursive=True)
 
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
     dc.delete(root, recursive=True)
 

@@ -84,8 +89,9 @@ def test_cp_dbfs_folder_to_folder_non_recursive(w, random, junk, ls):
     new_root = f"/tmp/{random()}"
 
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
-
+
     dc.copy(root, new_root)
 
     assert ["/01"] == ls(new_root, recursive=True)

@@ -99,6 +105,7 @@ def test_cp_dbfs_folder_to_folder_recursive(w, random, junk, ls):
     new_root = f"/tmp/{random()}"
 
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
 
     dc.copy(root, new_root, recursive=True, overwrite=True)

@@ -114,8 +121,9 @@ def test_cp_dbfs_folder_to_existing_folder_recursive(w, random, junk, ls):
     new_root = f"/tmp/{random()}"
 
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
-
+
     dc.mkdirs(new_root)
     dc.copy(root, new_root, recursive=True, overwrite=True)
 

@@ -129,8 +137,9 @@ def test_cp_dbfs_file_to_non_existing_location(w, random, junk):
     copy_destination = f"{root}/{random()}"
 
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
-
+
     dc.copy(f"{root}/01", copy_destination)
 
     with dc.open(copy_destination, read=True) as f:

@@ -140,10 +149,11 @@ def test_cp_dbfs_file_to_non_existing_location(w, random, junk):
 def test_cp_dbfs_file_to_existing_folder(w, random, junk):
     root = f"/tmp/{random()}"
     payload = junk(f"{root}/01")
-
+
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
-
+
     dc.mkdirs(f"{root}/02")
     dc.copy(f"{root}/01", f"{root}/02")
 

@@ -155,8 +165,9 @@ def test_cp_dbfs_file_to_existing_location(w, random, junk):
     root = f"/tmp/{random()}"
     junk(f"{root}/01")
     junk(f"{root}/02")
-
+
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
 
     with pytest.raises(DatabricksError) as ei:

@@ -170,8 +181,9 @@ def test_cp_dbfs_file_to_existing_location_with_overwrite(w, random, junk):
     junk(f"{root}/02")
 
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
-
+
     dc.copy(f"{root}/01", f"{root}/02", overwrite=True)
 
     with dc.open(f"{root}/02", read=True) as f:

@@ -183,6 +195,7 @@ def test_move_within_dbfs(w, random, junk):
     payload = junk(f"{root}/01")
 
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
 
     dc.move_(f"{root}/01", f"{root}/02")

@@ -197,8 +210,9 @@ def test_move_from_dbfs_to_local(w, random, junk, tmp_path):
     payload_01 = junk(f"{root}/01")
     payload_02 = junk(f"{root}/a/02")
     payload_03 = junk(f"{root}/a/b/03")
-
+
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
 
     dc.move_(root, f"file:{tmp_path}", recursive=True)

@@ -217,8 +231,9 @@ def test_dbfs_upload_download(w, random, junk, tmp_path):
 
     f = io.BytesIO(b"some text data")
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
-
+
     dc.upload(f"{root}/01", f)
 
     with dc.download(f"{root}/01") as f:

@@ -242,11 +257,10 @@ def create_schema(w, catalog, schema):
         res = w.schemas.create(catalog_name=catalog, name=schema)
         return ResourceWithCleanup(lambda: w.schemas.delete(res.full_name))
 
-
     @staticmethod
     def create_volume(w, catalog, schema, volume):
-        from databricks.sdk.catalog.v2.client import VolumesClient
         from databricks.sdk.catalog.v2.catalog import VolumeType
+        from databricks.sdk.catalog.v2.client import VolumesClient
 
         vc = VolumesClient(config=w)
         res = vc.create(

tests/integration/test_iam.py

Lines changed: 2 additions & 3 deletions
@@ -2,8 +2,7 @@
 
 from databricks.sdk.databricks import errors
 from databricks.sdk.databricks.core import DatabricksError
-from databricks.sdk.iam.v2.client import GroupsClient
-from databricks.sdk.iam.v2.client import UsersClient
+from databricks.sdk.iam.v2.client import GroupsClient, UsersClient
 
 
 def test_filtering_groups(w, random):

@@ -25,7 +24,7 @@ def test_scim_error_unmarshall(w, random):
 
 def test_scim_get_user_as_dict(w):
     uc = UsersClient(config=w)
-
+
     first_user = next(uc.list())
     user = uc.get(first_user.id)
     # should not throw
