Skip to content

Commit 66a881f

Browse files
authored
Relaxed cluster check for databricks_sql_permissions (#3683)
* relax cluster check * fix * fix
1 parent eaa2772 commit 66a881f

File tree

3 files changed

+121
-33
lines changed

3 files changed

+121
-33
lines changed

access/resource_sql_permissions.go

Lines changed: 8 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -262,8 +262,8 @@ func (ta *SqlPermissions) initCluster(ctx context.Context, d *schema.ResourceDat
262262
if err != nil {
263263
return
264264
}
265-
if v, ok := clusterInfo.SparkConf["spark.databricks.acl.dfAclsEnabled"]; !ok || v != "true" {
266-
err = fmt.Errorf("cluster_id: not a High-Concurrency cluster: %s (%s)",
265+
if v, ok := clusterInfo.SparkConf["spark.databricks.acl.dfAclsEnabled"]; (!ok || v != "true") && clusterInfo.DataSecurityMode != "USER_ISOLATION" && clusterInfo.DataSecurityMode != "LEGACY_TABLE_ACL" {
266+
err = fmt.Errorf("cluster_id: specified cluster does not support setting Table ACL: %s (%s)",
267267
clusterInfo.ClusterName, clusterInfo.ClusterID)
268268
return
269269
}
@@ -282,11 +282,10 @@ func (ta *SqlPermissions) getOrCreateCluster(clustersAPI clusters.ClustersAPI) (
282282
SparkVersion: sparkVersion,
283283
NodeTypeID: nodeType,
284284
AutoterminationMinutes: 10,
285+
DataSecurityMode: "LEGACY_TABLE_ACL",
285286
SparkConf: map[string]string{
286-
"spark.databricks.acl.dfAclsEnabled": "true",
287-
"spark.databricks.repl.allowedLanguages": "python,sql",
288-
"spark.databricks.cluster.profile": "singleNode",
289-
"spark.master": "local[*]",
287+
"spark.databricks.cluster.profile": "singleNode",
288+
"spark.master": "local[*]",
290289
},
291290
CustomTags: map[string]string{
292291
"ResourceClass": "SingleNode",
@@ -305,8 +304,7 @@ func tableAclForUpdate(ctx context.Context, d *schema.ResourceData,
305304
return
306305
}
307306

308-
func tableAclForLoad(ctx context.Context, d *schema.ResourceData,
309-
s map[string]*schema.Schema, c *common.DatabricksClient) (ta SqlPermissions, err error) {
307+
func tableAclForLoad(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) (ta SqlPermissions, err error) {
310308
ta, err = loadTableACL(d.Id())
311309
if err != nil {
312310
return
@@ -345,7 +343,7 @@ func ResourceSqlPermissions() common.Resource {
345343
return nil
346344
},
347345
Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
348-
ta, err := tableAclForLoad(ctx, d, s, c)
346+
ta, err := tableAclForLoad(ctx, d, c)
349347
if err != nil {
350348
return err
351349
}
@@ -370,7 +368,7 @@ func ResourceSqlPermissions() common.Resource {
370368
return ta.enforce()
371369
},
372370
Delete: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
373-
ta, err := tableAclForLoad(ctx, d, s, c)
371+
ta, err := tableAclForLoad(ctx, d, c)
374372
if err != nil {
375373
return err
376374
}

access/resource_sql_permissions_test.go

Lines changed: 101 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -227,11 +227,10 @@ var createHighConcurrencyCluster = []qa.HTTPFixture{
227227
"ResourceClass": "SingleNode",
228228
},
229229
SparkConf: map[string]string{
230-
"spark.databricks.acl.dfAclsEnabled": "true",
231-
"spark.databricks.repl.allowedLanguages": "python,sql",
232-
"spark.databricks.cluster.profile": "singleNode",
233-
"spark.master": "local[*]",
230+
"spark.databricks.cluster.profile": "singleNode",
231+
"spark.master": "local[*]",
234232
},
233+
DataSecurityMode: "LEGACY_TABLE_ACL",
235234
},
236235
Response: clusters.ClusterID{
237236
ClusterID: "bcd",
@@ -242,13 +241,87 @@ var createHighConcurrencyCluster = []qa.HTTPFixture{
242241
ReuseRequest: true,
243242
Resource: "/api/2.0/clusters/get?cluster_id=bcd",
244243
Response: clusters.ClusterInfo{
245-
ClusterID: "bcd",
246-
State: "RUNNING",
244+
ClusterID: "bcd",
245+
State: "RUNNING",
246+
DataSecurityMode: "LEGACY_TABLE_ACL",
247+
SparkConf: map[string]string{
248+
"spark.databricks.cluster.profile": "singleNode",
249+
},
250+
},
251+
},
252+
}
253+
254+
var createSharedCluster = []qa.HTTPFixture{
255+
{
256+
Method: "GET",
257+
ReuseRequest: true,
258+
Resource: "/api/2.0/clusters/list",
259+
Response: map[string]any{},
260+
},
261+
{
262+
Method: "GET",
263+
ReuseRequest: true,
264+
Resource: "/api/2.0/clusters/spark-versions",
265+
Response: clusters.SparkVersionsList{
266+
SparkVersions: []clusters.SparkVersion{
267+
{
268+
Version: "7.1.x-cpu-ml-scala2.12",
269+
Description: "7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)",
270+
},
271+
},
272+
},
273+
},
274+
{
275+
Method: "GET",
276+
ReuseRequest: true,
277+
Resource: "/api/2.0/clusters/list-node-types",
278+
Response: compute.ListNodeTypesResponse{
279+
NodeTypes: []compute.NodeType{
280+
{
281+
NodeTypeId: "Standard_F4s",
282+
InstanceTypeId: "Standard_F4s",
283+
MemoryMb: 8192,
284+
NumCores: 4,
285+
NodeInstanceType: &compute.NodeInstanceType{
286+
LocalDisks: 1,
287+
InstanceTypeId: "Standard_F4s",
288+
LocalDiskSizeGb: 16,
289+
},
290+
},
291+
},
292+
},
293+
},
294+
{
295+
Method: "POST",
296+
ReuseRequest: true,
297+
Resource: "/api/2.0/clusters/create",
298+
ExpectedRequest: clusters.Cluster{
299+
AutoterminationMinutes: 10,
300+
ClusterName: "terraform-table-acl",
301+
NodeTypeID: "Standard_F4s",
302+
SparkVersion: "7.3.x-scala2.12",
303+
CustomTags: map[string]string{
304+
"ResourceClass": "SingleNode",
305+
},
306+
DataSecurityMode: "LEGACY_TABLE_ACL",
247307
SparkConf: map[string]string{
248-
"spark.databricks.acl.dfAclsEnabled": "true",
249-
"spark.databricks.cluster.profile": "singleNode",
308+
"spark.databricks.cluster.profile": "singleNode",
309+
"spark.master": "local[*]",
250310
},
251311
},
312+
Response: clusters.ClusterID{
313+
ClusterID: "bcd",
314+
},
315+
},
316+
{
317+
Method: "GET",
318+
ReuseRequest: true,
319+
Resource: "/api/2.0/clusters/get?cluster_id=bcd",
320+
Response: clusters.ClusterInfo{
321+
ClusterID: "bcd",
322+
State: "RUNNING",
323+
DataSecurityMode: "USER_ISOLATION",
324+
},
252325
},
253326
}
254327

@@ -272,6 +345,26 @@ func TestResourceSqlPermissions_Read(t *testing.T) {
272345
}.ApplyNoError(t)
273346
}
274347

348+
func TestResourceSqlPermissions_ReadSharedCluster(t *testing.T) {
349+
qa.ResourceFixture{
350+
CommandMock: mockData{
351+
"SHOW GRANT ON TABLE `default`.`foo`": {
352+
{"users", "SELECT", "database", "foo"},
353+
{"users", "SELECT", "table", "`default`.`foo`"},
354+
{"[email protected]", "OWN", "table", "`default`.`foo`"},
355+
{"users", "READ", "table", "`default`.`foo`"},
356+
{"users", "SELECT", "database", "default"},
357+
{"interns", "DENIED_SELECT", "table", "`default`.`foo`"},
358+
},
359+
}.toCommandMock(),
360+
Fixtures: createSharedCluster,
361+
Resource: ResourceSqlPermissions(),
362+
Read: true,
363+
New: true,
364+
ID: "table/default.foo",
365+
}.ApplyNoError(t)
366+
}
367+
275368
func TestResourceSqlPermissions_Read_Error(t *testing.T) {
276369
qa.ResourceFixture{
277370
Resource: ResourceSqlPermissions(),

docs/resources/sql_permissions.md

Lines changed: 12 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -19,18 +19,6 @@ resource "databricks_cluster" "cluster_with_table_access_control" {
1919
}
2020
```
2121

22-
It could be combined with creation of High-Concurrency and Single-Node clusters - in this case it should have corresponding `custom_tags` and `spark.databricks.cluster.profile` in Spark configuration as described in [documentation for `databricks_cluster` resource](cluster.md).
23-
24-
The created cluster could be referred to by providing its ID as `cluster_id` property.
25-
26-
27-
```hcl
28-
resource "databricks_sql_permissions" "foo_table" {
29-
cluster_id = databricks_cluster.cluster_name.id
30-
#...
31-
}
32-
```
33-
3422
It is required to define all permissions for a securable in a single resource, otherwise Terraform cannot guarantee config drift prevention.
3523

3624
## Example Usage
@@ -60,11 +48,20 @@ resource "databricks_sql_permissions" "foo_table" {
6048

6149
## Argument Reference
6250

51+
* `cluster_id` - (Optional) Id of an existing [databricks_cluster](cluster.md), where the appropriate `GRANT`/`REVOKE` commands are executed. This cluster must have the appropriate data security mode specified (`USER_ISOLATION` or `LEGACY_TABLE_ACL`). If no `cluster_id` is specified, a single-node TACL cluster named `terraform-table-acl` is automatically created.
52+
53+
```hcl
54+
resource "databricks_sql_permissions" "foo_table" {
55+
cluster_id = databricks_cluster.cluster_name.id
56+
#...
57+
}
58+
```
59+
6360
The following arguments are available to specify the data object you need to enforce access controls on. You must specify only one of those arguments (except for `table` and `view`), otherwise resource creation will fail.
6461

6562
* `database` - Name of the database. Has default value of `default`.
66-
* `table` - Name of the table. Can be combined with `database`.
67-
* `view` - Name of the view. Can be combined with `database`.
63+
* `table` - Name of the table. Can be combined with `database`.
64+
* `view` - Name of the view. Can be combined with `database`.
6865
* `catalog` - (Boolean) If this access control for the entire catalog. Defaults to `false`.
6966
* `any_file` - (Boolean) If this access control for reading/writing any file. Defaults to `false`.
7067
* `anonymous_function` - (Boolean) If this access control for using anonymous function. Defaults to `false`.
@@ -100,7 +97,7 @@ The resource can be imported using a synthetic identifier. Examples of valid syn
10097
* `anonymous function/` - anonymous function. `/` suffix is mandatory.
10198

10299
```bash
103-
$ terraform import databricks_sql_permissions.foo /<object-type>/<object-name>
100+
terraform import databricks_sql_permissions.foo /<object-type>/<object-name>
104101
```
105102

106103
## Related Resources

0 commit comments

Comments
 (0)