Skip to content

Commit 27ad78d

Browse files
authored
[Fix] Don't use single-node cluster for databricks_sql_permissions (#4141)
## Changes <!-- Summary of your changes that are easy to understand --> Due to internal changes in the backend API, it no longer allows creating non-UC single-node clusters. Until the fix is rolled out on the backend, create the cluster used for setting permissions with 1 worker instead. Also reviewed and fixed the documentation's grammar. Resolves #4140 ## Tests <!-- How is this tested? Please see the checklist below and also describe any other relevant tests --> - [x] `make test` run locally - [x] relevant change in `docs/` folder - [ ] covered with integration tests in `internal/acceptance` - [ ] relevant acceptance tests are passing - [ ] using Go SDK
1 parent 6f8a9c9 commit 27ad78d

File tree

4 files changed

+52
-44
lines changed

4 files changed

+52
-44
lines changed

NEXT_CHANGELOG.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,8 @@
66

77
### Bug Fixes
88

9+
* Fix automatic cluster creation for `databricks_sql_permissions` ([#4141](https://github.com/databricks/terraform-provider-databricks/pull/4141))
10+
911
### Documentation
1012

1113
### Exporter

access/resource_sql_permissions.go

Lines changed: 11 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -277,7 +277,8 @@ func (ta *SqlPermissions) initCluster(ctx context.Context, d *schema.ResourceDat
277277

278278
func (ta *SqlPermissions) getOrCreateCluster(clustersAPI clusters.ClustersAPI) (string, error) {
279279
sparkVersion := clusters.LatestSparkVersionOrDefault(clustersAPI.Context(), clustersAPI.WorkspaceClient(), compute.SparkVersionRequest{
280-
Latest: true,
280+
Latest: true,
281+
LongTermSupport: true,
281282
})
282283
nodeType := clustersAPI.GetSmallestNodeType(compute.NodeTypeRequest{LocalDisk: true})
283284
aclCluster, err := clustersAPI.GetOrCreateRunningCluster(
@@ -287,13 +288,15 @@ func (ta *SqlPermissions) getOrCreateCluster(clustersAPI clusters.ClustersAPI) (
287288
NodeTypeID: nodeType,
288289
AutoterminationMinutes: 10,
289290
DataSecurityMode: "LEGACY_TABLE_ACL",
290-
SparkConf: map[string]string{
291-
"spark.databricks.cluster.profile": "singleNode",
292-
"spark.master": "local[*]",
293-
},
294-
CustomTags: map[string]string{
295-
"ResourceClass": "SingleNode",
296-
},
291+
// TODO: return back after backend fix is rolled out
292+
NumWorkers: 1,
293+
// SparkConf: map[string]string{
294+
// "spark.databricks.cluster.profile": "singleNode",
295+
// "spark.master": "local[*]",
296+
// },
297+
// CustomTags: map[string]string{
298+
// "ResourceClass": "SingleNode",
299+
// },
297300
})
298301
if err != nil {
299302
return "", err

access/resource_sql_permissions_test.go

Lines changed: 27 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -188,8 +188,8 @@ var createHighConcurrencyCluster = []qa.HTTPFixture{
188188
Response: compute.GetSparkVersionsResponse{
189189
Versions: []compute.SparkVersion{
190190
{
191-
Key: "7.1.x-cpu-ml-scala2.12",
192-
Name: "7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)",
191+
Key: "15.4.x-scala2.12",
192+
Name: "15.4 LTS (includes Apache Spark 3.5.0, Scala 2.12)",
193193
},
194194
},
195195
},
@@ -222,15 +222,16 @@ var createHighConcurrencyCluster = []qa.HTTPFixture{
222222
AutoterminationMinutes: 10,
223223
ClusterName: "terraform-table-acl",
224224
NodeTypeID: "Standard_F4s",
225-
SparkVersion: "11.3.x-scala2.12",
226-
CustomTags: map[string]string{
227-
"ResourceClass": "SingleNode",
228-
},
229-
SparkConf: map[string]string{
230-
"spark.databricks.cluster.profile": "singleNode",
231-
"spark.master": "local[*]",
232-
},
233-
DataSecurityMode: "LEGACY_TABLE_ACL",
225+
SparkVersion: "15.4.x-scala2.12",
226+
DataSecurityMode: "LEGACY_TABLE_ACL",
227+
NumWorkers: 1,
228+
// CustomTags: map[string]string{
229+
// "ResourceClass": "SingleNode",
230+
// },
231+
// SparkConf: map[string]string{
232+
// "spark.databricks.cluster.profile": "singleNode",
233+
// "spark.master": "local[*]",
234+
// },
234235
},
235236
Response: clusters.ClusterID{
236237
ClusterID: "bcd",
@@ -244,9 +245,9 @@ var createHighConcurrencyCluster = []qa.HTTPFixture{
244245
ClusterID: "bcd",
245246
State: "RUNNING",
246247
DataSecurityMode: "LEGACY_TABLE_ACL",
247-
SparkConf: map[string]string{
248-
"spark.databricks.cluster.profile": "singleNode",
249-
},
248+
// SparkConf: map[string]string{
249+
// "spark.databricks.cluster.profile": "singleNode",
250+
// },
250251
},
251252
},
252253
}
@@ -265,8 +266,8 @@ var createSharedCluster = []qa.HTTPFixture{
265266
Response: compute.GetSparkVersionsResponse{
266267
Versions: []compute.SparkVersion{
267268
{
268-
Key: "7.1.x-cpu-ml-scala2.12",
269-
Name: "7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)",
269+
Key: "15.4.x-scala2.12",
270+
Name: "15.4 LTS (includes Apache Spark 3.5.0, Scala 2.12)",
270271
},
271272
},
272273
},
@@ -299,15 +300,16 @@ var createSharedCluster = []qa.HTTPFixture{
299300
AutoterminationMinutes: 10,
300301
ClusterName: "terraform-table-acl",
301302
NodeTypeID: "Standard_F4s",
302-
SparkVersion: "11.3.x-scala2.12",
303-
CustomTags: map[string]string{
304-
"ResourceClass": "SingleNode",
305-
},
306-
DataSecurityMode: "LEGACY_TABLE_ACL",
307-
SparkConf: map[string]string{
308-
"spark.databricks.cluster.profile": "singleNode",
309-
"spark.master": "local[*]",
310-
},
303+
SparkVersion: "15.4.x-scala2.12",
304+
DataSecurityMode: "LEGACY_TABLE_ACL",
305+
NumWorkers: 1,
306+
// CustomTags: map[string]string{
307+
// "ResourceClass": "SingleNode",
308+
// },
309+
// SparkConf: map[string]string{
310+
// "spark.databricks.cluster.profile": "singleNode",
311+
// "spark.master": "local[*]",
312+
// },
311313
},
312314
Response: clusters.ClusterID{
313315
ClusterID: "bcd",

docs/resources/sql_permissions.md

Lines changed: 12 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -3,14 +3,13 @@ subcategory: "Security"
33
---
44
# databricks_sql_permissions Resource
55

6-
-> Please switch to [databricks_grants](grants.md) with Unity Catalog to manage data access, which provides a better and faster way for managing data security. `databricks_grants` resource *doesn't require a technical cluster to perform operations*. On workspaces with Unity Catalog enabled, you may run into errors such as `Error: cannot create sql permissions: cannot read current grants: For unity catalog, please specify the catalog name explicitly. E.g. SHOW GRANT ``[email protected]`` ON CATALOG main`. This happens if your `default_catalog_name` was set to a UC catalog instead of `hive_metastore`. The workaround is to re-assign the metastore again with the default catalog set to be `hive_metastore`. See [databricks_metastore_assignment](metastore_assignment.md).
6+
-> Please switch to [databricks_grants](grants.md) with Unity Catalog to manage data access, which provides a better and faster way for managing data security. `databricks_grants` resource *doesn't require a technical cluster to perform operations*. On workspaces with Unity Catalog enabled, you may run into errors such as `Error: cannot create sql permissions: cannot read current grants: For unity catalog, please specify the catalog name explicitly. E.g. SHOW GRANT ``[email protected]`` ON CATALOG main`. This happens if your `default_catalog_name` was set to a UC catalog instead of `hive_metastore`. The workaround is to re-assign the metastore again with the default catalog set to `hive_metastore`. See [databricks_metastore_assignment](metastore_assignment.md).
77

8-
This resource manages data object access control lists in Databricks workspaces for things like tables, views, databases, and [more](https://docs.databricks.com/security/access-control/table-acls/object-privileges.html). In order to enable Table Access control, you have to login to the workspace as administrator, go to `Admin Console`, pick `Access Control` tab, click on `Enable` button in `Table Access Control` section, and click `Confirm`. The security guarantees of table access control **will only be effective if cluster access control is also turned on**. Please make sure that no users can create clusters in your workspace and all [databricks_cluster](cluster.md) have approximately the following configuration:
8+
This resource manages data object access control lists in Databricks workspaces for things like tables, views, databases, and [more](https://docs.databricks.com/security/access-control/table-acls/object-privileges.html). In order to enable Table Access control, you have to login to the workspace as administrator, go to `Admin Console`, pick the `Access Control` tab, click on the `Enable` button in the `Table Access Control` section, and click `Confirm`. The security guarantees of table access control **will only be effective if cluster access control is also turned on**. Please make sure that no users can create clusters in your workspace and all [databricks_cluster](cluster.md) have approximately the following configuration:
99

1010
```hcl
1111
resource "databricks_cluster" "cluster_with_table_access_control" {
1212
// ...
13-
1413
spark_conf = {
1514
"spark.databricks.acl.dfAclsEnabled" : "true",
1615
"spark.databricks.repl.allowedLanguages" : "python,sql",
@@ -34,11 +33,13 @@ The following resource definition will enforce access control on a table by exec
3433
resource "databricks_sql_permissions" "foo_table" {
3534
table = "foo"
3635
36+
3737
privilege_assignments {
3838
principal = "[email protected]"
3939
privileges = ["SELECT", "MODIFY"]
4040
}
4141
42+
4243
privilege_assignments {
4344
principal = "special group"
4445
privileges = ["SELECT"]
@@ -48,7 +49,7 @@ resource "databricks_sql_permissions" "foo_table" {
4849

4950
## Argument Reference
5051

51-
* `cluster_id` - (Optional) Id of an existing [databricks_cluster](cluster.md), where the appropriate `GRANT`/`REVOKE` commands are executed. This cluster must have the appropriate access mode (`USER_ISOLATION` or `LEGACY_TABLE_ACL` specified). If no `cluster_id` is specified, a single-node TACL cluster named `terraform-table-acl` is automatically created.
52+
* `cluster_id` - (Optional) Id of an existing [databricks_cluster](cluster.md), where the appropriate `GRANT`/`REVOKE` commands are executed. This cluster must have the appropriate data security mode (`USER_ISOLATION` or `LEGACY_TABLE_ACL` specified). If no `cluster_id` is specified, a TACL-enabled cluster with the name `terraform-table-acl` is automatically created.
5253

5354
```hcl
5455
resource "databricks_sql_permissions" "foo_table" {
@@ -59,12 +60,12 @@ resource "databricks_sql_permissions" "foo_table" {
5960

6061
The following arguments are available to specify the data object you need to enforce access controls on. You must specify only one of those arguments (except for `table` and `view`), otherwise resource creation will fail.
6162

62-
* `database` - Name of the database. Has default value of `default`.
63-
* `table` - Name of the table. Can be combined with `database`.
64-
* `view` - Name of the view. Can be combined with `database`.
63+
* `database` - Name of the database. Has a default value of `default`.
64+
* `table` - Name of the table. Can be combined with the `database`.
65+
* `view` - Name of the view. Can be combined with the `database`.
6566
* `catalog` - (Boolean) If this access control for the entire catalog. Defaults to `false`.
6667
* `any_file` - (Boolean) If this access control for reading/writing any file. Defaults to `false`.
67-
* `anonymous_function` - (Boolean) If this access control for using anonymous function. Defaults to `false`.
68+
* `anonymous_function` - (Boolean) If this access control for using an anonymous function. Defaults to `false`.
6869

6970
### `privilege_assignments` blocks
7071

@@ -81,15 +82,15 @@ You must specify one or many `privilege_assignments` configuration blocks to dec
8182
* `USAGE` - do not give any abilities, but is an additional requirement to perform any action on a database object.
8283
* `READ_METADATA` - gives the ability to view an object and its metadata.
8384
* `CREATE_NAMED_FUNCTION` - gives the ability to create a named UDF in an existing catalog or database.
84-
* `MODIFY_CLASSPATH` - gives the ability to add files to the Spark class path.
85+
* `MODIFY_CLASSPATH` - gives the ability to add files to the Spark classpath.
8586

86-
-> Even though the value `ALL PRIVILEGES` is mentioned in Table ACL documentation, it's not recommended to use it from terraform, as it may result in unnecessary state updates.
87+
-> Even though the value `ALL PRIVILEGES` is mentioned in Table ACL documentation, it's not recommended to use it from Terraform, as it may result in unnecessary state updates.
8788

8889
## Import
8990

9091
The resource can be imported using a synthetic identifier. Examples of valid synthetic identifiers are:
9192

92-
* `table/default.foo` - table `foo` in a `default` database. Database is always mandatory.
93+
* `table/default.foo` - table `foo` in a `default` database. The `database` is always mandatory.
9394
* `view/bar.foo` - view `foo` in `bar` database.
9495
* `database/bar` - `bar` database.
9596
* `catalog/` - entire catalog. `/` suffix is mandatory.

0 commit comments

Comments
 (0)