 import shutil
 import subprocess
 import sys
+import typing
 import urllib.parse
 from functools import partial
 from pathlib import Path
 
 import pytest
 
 from databricks.sdk.service.compute import (ClusterSpec, DataSecurityMode,
-                                            Library, ResultType)
+                                            Library, ResultType, SparkVersion)
 from databricks.sdk.service.jobs import NotebookTask, Task, ViewType
 from databricks.sdk.service.workspace import ImportFormat
 
@@ -84,19 +85,41 @@ def test_runtime_auth_from_interactive_on_uc(ucws, fresh_wheel_file, env_or_skip
     ucws.clusters.permanent_delete(interactive_cluster.cluster_id)
 
 
-def test_runtime_auth_from_jobs(w, fresh_wheel_file, env_or_skip, random):
-    instance_pool_id = env_or_skip('TEST_INSTANCE_POOL_ID')
-
+def _get_lts_versions(w) -> typing.List[SparkVersion]:
     v = w.clusters.spark_versions()
     lts_runtimes = [
         x for x in v.versions
         if 'LTS' in x.name and '-ml' not in x.key and '-photon' not in x.key and '-aarch64' not in x.key
     ]
+    return lts_runtimes
+
+
+def test_runtime_auth_from_jobs_volumes(ucws, fresh_wheel_file, env_or_skip, random, volume):
+    dbr_versions = [v for v in _get_lts_versions(ucws) if int(v.key.split('.')[0]) >= 15]
+
+    volume_wheel = f'{volume}/tmp/wheels/{random(10)}/{fresh_wheel_file.name}'
+    with fresh_wheel_file.open('rb') as f:
+        ucws.files.upload(volume_wheel, f)
+
+    lib = Library(whl=volume_wheel)
+    return _test_runtime_auth_from_jobs_inner(ucws, env_or_skip, random, dbr_versions, lib)
+
+
+def test_runtime_auth_from_jobs_dbfs(w, fresh_wheel_file, env_or_skip, random):
+    # Library installation from DBFS is not supported past DBR 14.3
+    dbr_versions = [v for v in _get_lts_versions(w) if int(v.key.split('.')[0]) < 15]
 
     dbfs_wheel = f'/tmp/wheels/{random(10)}/{fresh_wheel_file.name}'
     with fresh_wheel_file.open('rb') as f:
         w.dbfs.upload(dbfs_wheel, f)
 
+    lib = Library(whl=f'dbfs:{dbfs_wheel}')
+    return _test_runtime_auth_from_jobs_inner(w, env_or_skip, random, dbr_versions, lib)
+
+
+def _test_runtime_auth_from_jobs_inner(w, env_or_skip, random, dbr_versions, library):
+    instance_pool_id = env_or_skip('TEST_INSTANCE_POOL_ID')
+
     my_name = w.current_user.me().user_name
     notebook_path = f'/Users/{my_name}/notebook-native-auth'
     notebook_content = io.BytesIO(b'''
@@ -109,16 +132,20 @@ def test_runtime_auth_from_jobs(w, fresh_wheel_file, env_or_skip, random):
     w.workspace.upload(notebook_path, notebook_content, language=Language.PYTHON, overwrite=True)
 
     tasks = []
-    for v in lts_runtimes:
+    for v in dbr_versions:
         t = Task(task_key=f'test_{v.key.replace(".", "_")}',
                  notebook_task=NotebookTask(notebook_path=notebook_path),
-                 new_cluster=ClusterSpec(spark_version=v.key,
-                                         num_workers=1,
-                                         instance_pool_id=instance_pool_id),
-                 libraries=[Library(whl=f'dbfs:{dbfs_wheel}')])
+                 new_cluster=ClusterSpec(
+                     spark_version=v.key,
+                     num_workers=1,
+                     instance_pool_id=instance_pool_id,
+                     # GCP uses "custom" data security mode by default, which does not support UC.
+                     data_security_mode=DataSecurityMode.SINGLE_USER),
+                 libraries=[library])
         tasks.append(t)
 
-    run = w.jobs.submit(run_name=f'Runtime Native Auth {random(10)}', tasks=tasks).result()
+    waiter = w.jobs.submit(run_name=f'Runtime Native Auth {random(10)}', tasks=tasks)
+    run = waiter.result()
     for task_key, output in _task_outputs(w, run).items():
         assert my_name in output, f'{task_key} does not work with notebook native auth'
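Note on the version split above: the volumes test keeps only DBR 15+ LTS runtimes, while the DBFS test keeps pre-15 ones, since library installation from DBFS is not supported past DBR 14.3. A minimal sketch of that major-version filter, using hypothetical runtime keys in place of what w.clusters.spark_versions() actually returns:

# Sketch of the major-version split used by the two tests; the keys below are
# hypothetical examples, not real spark_versions() output.
keys = ['13.3.x-scala2.12', '14.3.x-scala2.12', '15.4.x-scala2.12']

dbfs_versions = [k for k in keys if int(k.split('.')[0]) < 15]     # DBFS wheels: DBR < 15
volume_versions = [k for k in keys if int(k.split('.')[0]) >= 15]  # UC volume wheels: DBR >= 15

assert dbfs_versions == ['13.3.x-scala2.12', '14.3.x-scala2.12']
assert volume_versions == ['15.4.x-scala2.12']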
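The submit change binds the returned waiter to a variable before blocking on it, as the diff itself shows. A sketch of the same pattern in isolation, assuming a configured WorkspaceClient; the task names, notebook path, and cluster id here are placeholders, not part of the test:

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.jobs import NotebookTask, Task

w = WorkspaceClient()
tasks = [Task(task_key='t1',
              notebook_task=NotebookTask(notebook_path='/Users/someone@example.com/nb'),
              existing_cluster_id='0000-000000-example0')]  # placeholder cluster id

# submit() returns immediately with a waiter handle for the one-time run;
# result() polls the run until it reaches a terminal state, then returns it.
waiter = w.jobs.submit(run_name='waiter-example', tasks=tasks)
run = waiter.result()
print(run.state)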