@@ -41,7 +41,9 @@ def test_dbfs_io(w, random):
 @pytest.fixture
 def junk(w, random):
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
+
     def inner(path: str, size=256) -> bytes:
         to_write = random(size).encode()
         with dc.open(path, write=True) as f:
@@ -55,7 +57,9 @@ def inner(path: str, size=256) -> bytes:
 @pytest.fixture
 def ls(w):
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
+
     def inner(root: str, recursive=False) -> List[str]:
         return [f.path.removeprefix(root) for f in dc.list(root, recursive=recursive)]
 
@@ -72,6 +76,7 @@ def test_recursive_listing(w, random, junk, ls):
     assert ["/01", "/a/02", "/a/b/03"] == ls(root, recursive=True)
 
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
     dc.delete(root, recursive=True)
 
@@ -84,8 +89,9 @@ def test_cp_dbfs_folder_to_folder_non_recursive(w, random, junk, ls):
     new_root = f"/tmp/{random()}"
 
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
-
+
     dc.copy(root, new_root)
 
     assert ["/01"] == ls(new_root, recursive=True)
@@ -99,6 +105,7 @@ def test_cp_dbfs_folder_to_folder_recursive(w, random, junk, ls):
     new_root = f"/tmp/{random()}"
 
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
 
     dc.copy(root, new_root, recursive=True, overwrite=True)
@@ -114,8 +121,9 @@ def test_cp_dbfs_folder_to_existing_folder_recursive(w, random, junk, ls):
     new_root = f"/tmp/{random()}"
 
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
-
+
     dc.mkdirs(new_root)
     dc.copy(root, new_root, recursive=True, overwrite=True)
 
@@ -129,8 +137,9 @@ def test_cp_dbfs_file_to_non_existing_location(w, random, junk):
     copy_destination = f"{root}/{random()}"
 
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
-
+
     dc.copy(f"{root}/01", copy_destination)
 
     with dc.open(copy_destination, read=True) as f:
@@ -140,10 +149,11 @@ def test_cp_dbfs_file_to_non_existing_location(w, random, junk):
 def test_cp_dbfs_file_to_existing_folder(w, random, junk):
     root = f"/tmp/{random()}"
     payload = junk(f"{root}/01")
-
+
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
-
+
     dc.mkdirs(f"{root}/02")
     dc.copy(f"{root}/01", f"{root}/02")
 
@@ -155,8 +165,9 @@ def test_cp_dbfs_file_to_existing_location(w, random, junk):
     root = f"/tmp/{random()}"
     junk(f"{root}/01")
     junk(f"{root}/02")
-
+
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
 
     with pytest.raises(DatabricksError) as ei:
@@ -170,8 +181,9 @@ def test_cp_dbfs_file_to_existing_location_with_overwrite(w, random, junk):
     junk(f"{root}/02")
 
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
-
+
     dc.copy(f"{root}/01", f"{root}/02", overwrite=True)
 
     with dc.open(f"{root}/02", read=True) as f:
@@ -183,6 +195,7 @@ def test_move_within_dbfs(w, random, junk):
     payload = junk(f"{root}/01")
 
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
 
     dc.move_(f"{root}/01", f"{root}/02")
@@ -197,8 +210,9 @@ def test_move_from_dbfs_to_local(w, random, junk, tmp_path):
     payload_01 = junk(f"{root}/01")
     payload_02 = junk(f"{root}/a/02")
     payload_03 = junk(f"{root}/a/b/03")
-
+
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
 
     dc.move_(root, f"file:{tmp_path}", recursive=True)
@@ -217,8 +231,9 @@ def test_dbfs_upload_download(w, random, junk, tmp_path):
 
     f = io.BytesIO(b"some text data")
     from databricks.sdk.files.v2.client import DbfsClient
+
     dc = DbfsClient(config=w)
-
+
     dc.upload(f"{root}/01", f)
 
     with dc.download(f"{root}/01") as f:
@@ -242,11 +257,10 @@ def create_schema(w, catalog, schema):
         res = w.schemas.create(catalog_name=catalog, name=schema)
         return ResourceWithCleanup(lambda: w.schemas.delete(res.full_name))
 
-
     @staticmethod
     def create_volume(w, catalog, schema, volume):
-        from databricks.sdk.catalog.v2.client import VolumesClient
         from databricks.sdk.catalog.v2.catalog import VolumeType
+        from databricks.sdk.catalog.v2.client import VolumesClient
 
         vc = VolumesClient(config=w)
         res = vc.create(