@@ -108,71 +108,71 @@ def _get_lts_versions(w) -> typing.List[SparkVersion]:
108108 return lts_runtimes
109109
110110
111- def test_runtime_auth_from_jobs_volumes(ucws, files_api, fresh_wheel_file, env_or_skip, random, volume):
112-     dbr_versions = [v for v in _get_lts_versions(ucws) if int(v.key.split(".")[0]) >= 15]
113-
114-     volume_wheel = f"{volume}/tmp/wheels/{random(10)}/{fresh_wheel_file.name}"
115-     with fresh_wheel_file.open("rb") as f:
116-         files_api.upload(volume_wheel, f)
117-
118-     lib = Library(whl=volume_wheel)
119-     return _test_runtime_auth_from_jobs_inner(ucws, env_or_skip, random, dbr_versions, lib)
120-
121-
122- def test_runtime_auth_from_jobs_dbfs(w, fresh_wheel_file, env_or_skip, random):
123-     # Library installation from DBFS is not supported past DBR 14.3
124-     dbr_versions = [v for v in _get_lts_versions(w) if int(v.key.split(".")[0]) < 15]
125-
126-     dbfs_wheel = f"/tmp/wheels/{random(10)}/{fresh_wheel_file.name}"
127-     with fresh_wheel_file.open("rb") as f:
128-         w.dbfs.upload(dbfs_wheel, f)
129-
130-     lib = Library(whl=f"dbfs:{dbfs_wheel}")
131-     return _test_runtime_auth_from_jobs_inner(w, env_or_skip, random, dbr_versions, lib)
132-
133-
134- def _test_runtime_auth_from_jobs_inner(w, env_or_skip, random, dbr_versions, library):
135-     instance_pool_id = env_or_skip("TEST_INSTANCE_POOL_ID")
136-
137-     my_name = w.current_user.me().user_name
138-     notebook_path = f"/Users/{my_name}/notebook-native-auth"
139-     notebook_content = io.BytesIO(
140-         b"""
141- from databricks.sdk import WorkspaceClient
142- w = WorkspaceClient()
143- me = w.current_user.me()
144- print(me.user_name)"""
145-     )
146-
147-     from databricks.sdk.service.workspace import Language
148-
149-     w.workspace.upload(
150-         notebook_path,
151-         notebook_content,
152-         language=Language.PYTHON,
153-         overwrite=True,
154-     )
155-
156-     tasks = []
157-     for v in dbr_versions:
158-         t = Task(
159-             task_key=f'test_{v.key.replace(".", "_")}',
160-             notebook_task=NotebookTask(notebook_path=notebook_path),
161-             new_cluster=ClusterSpec(
162-                 spark_version=v.key,
163-                 num_workers=1,
164-                 instance_pool_id=instance_pool_id,
165-                 # GCP uses "custom" data security mode by default, which does not support UC.
166-                 data_security_mode=DataSecurityMode.SINGLE_USER,
167-             ),
168-             libraries=[library],
169-         )
170-         tasks.append(t)
171-
172-     waiter = w.jobs.submit(run_name=f"Runtime Native Auth {random(10)}", tasks=tasks)
173-     run = waiter.result()
174-     for task_key, output in _task_outputs(w, run).items():
175-         assert my_name in output, f"{task_key} does not work with notebook native auth"
111+ # def test_runtime_auth_from_jobs_volumes(ucws, files_api, fresh_wheel_file, env_or_skip, random, volume):
112+ # dbr_versions = [v for v in _get_lts_versions(ucws) if int(v.key.split(".")[0]) >= 15]
113+
114+ # volume_wheel = f"{volume}/tmp/wheels/{random(10)}/{fresh_wheel_file.name}"
115+ # with fresh_wheel_file.open("rb") as f:
116+ # files_api.upload(volume_wheel, f)
117+
118+ # lib = Library(whl=volume_wheel)
119+ # return _test_runtime_auth_from_jobs_inner(ucws, env_or_skip, random, dbr_versions, lib)
120+
121+
122+ # def test_runtime_auth_from_jobs_dbfs(w, fresh_wheel_file, env_or_skip, random):
123+ # # Library installation from DBFS is not supported past DBR 14.3
124+ # dbr_versions = [v for v in _get_lts_versions(w) if int(v.key.split(".")[0]) < 15]
125+
126+ # dbfs_wheel = f"/tmp/wheels/{random(10)}/{fresh_wheel_file.name}"
127+ # with fresh_wheel_file.open("rb") as f:
128+ # w.dbfs.upload(dbfs_wheel, f)
129+
130+ # lib = Library(whl=f"dbfs:{dbfs_wheel}")
131+ # return _test_runtime_auth_from_jobs_inner(w, env_or_skip, random, dbr_versions, lib)
132+
133+
134+ # def _test_runtime_auth_from_jobs_inner(w, env_or_skip, random, dbr_versions, library):
135+ # instance_pool_id = env_or_skip("TEST_INSTANCE_POOL_ID")
136+
137+ # my_name = w.current_user.me().user_name
138+ # notebook_path = f"/Users/{my_name}/notebook-native-auth"
139+ # notebook_content = io.BytesIO(
140+ # b"""
141+ # from databricks.sdk import WorkspaceClient
142+ # w = WorkspaceClient()
143+ # me = w.current_user.me()
144+ # print(me.user_name)"""
145+ # )
146+
147+ # from databricks.sdk.service.workspace import Language
148+
149+ # w.workspace.upload(
150+ # notebook_path,
151+ # notebook_content,
152+ # language=Language.PYTHON,
153+ # overwrite=True,
154+ # )
155+
156+ # tasks = []
157+ # for v in dbr_versions:
158+ # t = Task(
159+ # task_key=f'test_{v.key.replace(".", "_")}',
160+ # notebook_task=NotebookTask(notebook_path=notebook_path),
161+ # new_cluster=ClusterSpec(
162+ # spark_version=v.key,
163+ # num_workers=1,
164+ # instance_pool_id=instance_pool_id,
165+ # # GCP uses "custom" data security mode by default, which does not support UC.
166+ # data_security_mode=DataSecurityMode.SINGLE_USER,
167+ # ),
168+ # libraries=[library],
169+ # )
170+ # tasks.append(t)
171+
172+ # waiter = w.jobs.submit(run_name=f"Runtime Native Auth {random(10)}", tasks=tasks)
173+ # run = waiter.result()
174+ # for task_key, output in _task_outputs(w, run).items():
175+ # assert my_name in output, f"{task_key} does not work with notebook native auth"
176176
177177
178178def _task_outputs(w, run):
0 commit comments