
Commit 6a4a5f5

Migrate cluster policies to new fixtures (#174)
1 parent 9bdaff9 commit 6a4a5f5

3 files changed: +14 −109 lines changed

tests/integration/conftest.py

Lines changed: 1 addition & 46 deletions

```diff
@@ -9,7 +9,6 @@
 import pytest
 from databricks.sdk import AccountClient, WorkspaceClient
 from databricks.sdk.core import Config
-from databricks.sdk.service.compute import CreatePolicyResponse
 from databricks.sdk.service.iam import AccessControlRequest, PermissionLevel
 from databricks.sdk.service.workspace import ObjectInfo, ObjectType
 
@@ -19,21 +18,14 @@
 from databricks.labs.ucx.providers.mixins.sql import StatementExecutionExt
 from databricks.labs.ucx.utils import ThreadedExecution
 
-from .utils import (
-    EnvironmentInfo,
-    InstanceProfile,
-    WorkspaceObjects,
-    _set_random_permissions,
-)
+from .utils import EnvironmentInfo, InstanceProfile, WorkspaceObjects
 
 logging.getLogger("tests").setLevel("DEBUG")
 logging.getLogger("databricks.labs.ucx").setLevel("DEBUG")
 
 logger = logging.getLogger(__name__)
 
-NUM_TEST_GROUPS = int(os.environ.get("NUM_TEST_GROUPS", 5))
 NUM_TEST_INSTANCE_PROFILES = int(os.environ.get("NUM_TEST_INSTANCE_PROFILES", 3))
-NUM_TEST_CLUSTER_POLICIES = int(os.environ.get("NUM_TEST_CLUSTER_POLICIES", 3))
 NUM_TEST_TOKENS = int(os.environ.get("NUM_TEST_TOKENS", 3))
 
 NUM_THREADS = int(os.environ.get("NUM_TEST_THREADS", 20))
@@ -242,41 +234,6 @@ def instance_profiles(env: EnvironmentInfo, ws: WorkspaceClient) -> list[Instanc
     logger.debug("Test instance profiles deleted")
 
 
-@pytest.fixture
-def cluster_policies(env: EnvironmentInfo, ws: WorkspaceClient) -> list[CreatePolicyResponse]:
-    logger.debug("Creating test cluster policies")
-
-    test_cluster_policies: list[CreatePolicyResponse] = [
-        ws.cluster_policies.create(
-            name=f"{env.test_uid}-test-{i}",
-            definition="""
-            {
-                "spark_version": {
-                    "type": "unlimited",
-                    "defaultValue": "auto:latest-lts"
-                }
-            }
-            """,
-        )
-        for i in range(NUM_TEST_CLUSTER_POLICIES)
-    ]
-
-    _set_random_permissions(
-        test_cluster_policies,
-        "policy_id",
-        RequestObjectType.CLUSTER_POLICIES,
-        env,
-        ws,
-        permission_levels=[PermissionLevel.CAN_USE],
-    )
-
-    yield test_cluster_policies
-
-    logger.debug("Deleting test instance pools")
-    executables = [partial(ws.cluster_policies.delete, p.policy_id) for p in test_cluster_policies]
-    Threader(executables).run()
-
-
 @pytest.fixture
 def tokens(ws: WorkspaceClient, env: EnvironmentInfo) -> list[AccessControlRequest]:
     logger.debug("Adding token-level permissions to groups")
@@ -352,14 +309,12 @@ def workspace_objects(ws: WorkspaceClient, env: EnvironmentInfo) -> WorkspaceObj
 
 @pytest.fixture
 def verifiable_objects(
-    cluster_policies,
     tokens,
     workspace_objects,
 ) -> list[tuple[list, str, RequestObjectType | None]]:
     _verifiable_objects = [
         (workspace_objects, "workspace_objects", None),
         (tokens, "tokens", RequestObjectType.AUTHORIZATION),
-        (cluster_policies, "policy_id", RequestObjectType.CLUSTER_POLICIES),
     ]
     yield _verifiable_objects
```
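For readers unfamiliar with the fixture-factory pattern this commit migrates to: instead of one bulk fixture that pre-creates `NUM_TEST_CLUSTER_POLICIES` policies and sprays random ACLs over them, each test calls a `make_*` factory that creates exactly the objects it needs and registers them for teardown. The sketch below is an assumption about the shape of such a factory, not this repository's actual implementation; the `ws` and `make_random` fixtures and the default values are hypothetical.

```python
import json
import logging

import pytest

logger = logging.getLogger(__name__)


@pytest.fixture
def make_cluster_policy(ws, make_random):
    """Hypothetical sketch: each call creates one cluster policy and
    registers it for deletion after the test finishes."""
    created = []

    def create(**kwargs):
        # Assumed defaults; the real fixture may differ.
        kwargs.setdefault("name", f"test-{make_random(8)}")
        kwargs.setdefault(
            "definition",
            json.dumps({"spark_version": {"type": "unlimited", "defaultValue": "auto:latest-lts"}}),
        )
        policy = ws.cluster_policies.create(**kwargs)
        created.append(policy)
        return policy

    yield create

    # Best-effort cleanup; a policy may already have been removed.
    for policy in created:
        try:
            ws.cluster_policies.delete(policy.policy_id)
        except Exception:
            logger.warning("could not delete policy %s", policy.policy_id)
```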
tests/integration/test_e2e.py

Lines changed: 12 additions & 0 deletions

```diff
@@ -134,6 +134,8 @@ def test_e2e(
     make_instance_pool_permissions,
     make_cluster,
     make_cluster_permissions,
+    make_cluster_policy,
+    make_cluster_policy_permissions,
     make_model,
     make_registered_model_permissions,
     make_experiment,
@@ -170,6 +172,16 @@ def test_e2e(
         ([cluster], "cluster_id", RequestObjectType.CLUSTERS),
     )
 
+    cluster_policy = make_cluster_policy()
+    make_cluster_policy_permissions(
+        object_id=cluster_policy.policy_id,
+        permission_level=random.choice([PermissionLevel.CAN_USE]),
+        group_name=ws_group.display_name,
+    )
+    verifiable_objects.append(
+        ([cluster_policy], "policy_id", RequestObjectType.CLUSTER_POLICIES),
+    )
+
     model = make_model()
     make_registered_model_permissions(
         object_id=model.id,
```
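Note that `random.choice([PermissionLevel.CAN_USE])` draws from a single-element list: CAN_USE is the only permission level cluster policies support, so the call keeps the test's randomized style while always yielding the same level. A matching permissions factory could look roughly like the sketch below; the fixture body is an assumption, though `ws.permissions.update` and the `"cluster-policies"` request object type are real SDK/REST surface.

```python
import pytest
from databricks.sdk.service.iam import AccessControlRequest


@pytest.fixture
def make_cluster_policy_permissions(ws):
    """Hypothetical sketch: grant a single group one permission level on a
    cluster policy via the generic permissions API."""

    def update(object_id: str, permission_level, group_name: str):
        return ws.permissions.update(
            request_object_type="cluster-policies",
            request_object_id=object_id,
            access_control_list=[
                AccessControlRequest(group_name=group_name, permission_level=permission_level)
            ],
        )

    return update
```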
tests/integration/utils.py

Lines changed: 1 addition & 63 deletions

```diff
@@ -1,16 +1,9 @@
 import logging
-import random
 from dataclasses import dataclass
-from typing import Any
 
-from databricks.sdk import WorkspaceClient
-from databricks.sdk.service.compute import ClusterSpec, DataSecurityMode
-from databricks.sdk.service.iam import AccessControlRequest, Group, PermissionLevel
-from databricks.sdk.service.jobs import JobCluster, PythonWheelTask, Task
+from databricks.sdk.service.iam import Group
 from databricks.sdk.service.workspace import ObjectInfo
 
-from databricks.labs.ucx.inventory.types import RequestObjectType
-
 logger = logging.getLogger(__name__)
 
 
@@ -26,61 +19,6 @@ class EnvironmentInfo:
     groups: list[tuple[Group, Group]]
 
 
-def _set_random_permissions(
-    objects: list[Any],
-    id_attribute: str,
-    request_object_type: RequestObjectType,
-    env: EnvironmentInfo,
-    ws: WorkspaceClient,
-    permission_levels: list[PermissionLevel],
-    num_acls: int | None = 3,
-):
-    def get_random_ws_group() -> Group:
-        return random.choice([g[0] for g in env.groups])
-
-    def get_random_permission_level() -> PermissionLevel:
-        return random.choice(permission_levels)
-
-    for _object in objects:
-        acl_req = [
-            AccessControlRequest(
-                group_name=get_random_ws_group().display_name, permission_level=get_random_permission_level()
-            )
-            for _ in range(num_acls)
-        ]
-
-        ws.permissions.update(
-            request_object_type=request_object_type,
-            request_object_id=getattr(_object, id_attribute),
-            access_control_list=acl_req,
-        )
-
-
-def _get_basic_job_cluster() -> JobCluster:
-    return JobCluster(
-        job_cluster_key="default",
-        new_cluster=ClusterSpec(
-            spark_version="13.2.x-scala2.12",
-            node_type_id="i3.xlarge",
-            driver_node_type_id="i3.xlarge",
-            num_workers=0,
-            spark_conf={"spark.master": "local[*, 4]", "spark.databricks.cluster.profile": "singleNode"},
-            custom_tags={
-                "ResourceClass": "SingleNode",
-            },
-            data_security_mode=DataSecurityMode.SINGLE_USER,
-        ),
-    )
-
-
-def _get_basic_task() -> Task:
-    return Task(
-        task_key="test",
-        python_wheel_task=PythonWheelTask(entry_point="main", package_name="some-pkg"),
-        job_cluster_key="default",
-    )
-
-
 @dataclass
 class WorkspaceObjects:
     root_dir: ObjectInfo
```
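Taken together with the test_e2e.py hunk above, the migration replaces `_set_random_permissions` (random ACLs applied to pre-created objects) with explicit per-object permission calls. A hypothetical standalone test using the two factories sketched earlier, assuming a `ws_group` fixture that yields a workspace group, would read:

```python
from databricks.sdk.service.iam import PermissionLevel


def test_cluster_policy_can_use(make_cluster_policy, make_cluster_policy_permissions, ws_group):
    # Create one policy and grant the test group the only level policies support.
    policy = make_cluster_policy()
    make_cluster_policy_permissions(
        object_id=policy.policy_id,
        permission_level=PermissionLevel.CAN_USE,
        group_name=ws_group.display_name,
    )
```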
