
Commit 7612114

Added databricks_views data resource (#1283)
* Added `databricks_views` data resource for listing views in a Unity Catalog managed schema
* Made `databricks_tables` return only managed or external tables in Unity Catalog. Fix #1274
1 parent a194b3a commit 7612114
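
For context, using the new data source from Terraform might look roughly like the following; a minimal sketch with an illustrative `sandbox` catalog and `things` schema (the `output` name is made up):

```hcl
data "databricks_views" "things" {
  catalog_name = "sandbox"
  schema_name  = "things"
}

# View full names come back as a set of "catalog.schema.view" strings
output "all_things_views" {
  value = data.databricks_views.things.ids
}
```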

File tree

10 files changed: +199 additions, -9 deletions

* CHANGELOG.md
* catalog/data_tables.go
* catalog/data_views.go
* catalog/data_views_test.go
* docs/data-sources/tables.md
* docs/data-sources/views.md
* docs/resources/grants.md
* docs/resources/sql_permissions.md
* provider/provider.go
* qa/testing.go

CHANGELOG.md

Lines changed: 4 additions & 0 deletions

@@ -1,5 +1,9 @@
 # Version changelog
 
+## 0.5.6
+
+* Added `databricks_views` data resource, making `databricks_tables` return only managed or external tables in Unity Catalog ([#1274](https://github.com/databrickslabs/terraform-provider-databricks/issues/1274)).
+
 ## 0.5.5
 
 * Added configuration generators for `databricks_sql_*` resources in _experimental_ [Resource Exporter](https://asciinema.org/a/Rv8ZFJQpfrfp6ggWddjtyXaOy) ([#1199](https://github.com/databrickslabs/terraform-provider-databricks/pull/1199)).

catalog/data_tables.go

Lines changed: 3 additions & 1 deletion

@@ -21,7 +21,9 @@ func DataSourceTables() *schema.Resource {
 			return err
 		}
 		for _, v := range tables.Tables {
-			data.Ids = append(data.Ids, v.FullName())
+			if v.TableType != "VIEW" {
+				data.Ids = append(data.Ids, v.FullName())
+			}
 		}
 		return nil
 	})

catalog/data_views.go

Lines changed: 30 additions & 0 deletions

@@ -0,0 +1,30 @@
+package catalog
+
+import (
+	"context"
+
+	"github.com/databrickslabs/terraform-provider-databricks/common"
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+)
+
+func DataSourceViews() *schema.Resource {
+	type viewsData struct {
+		CatalogName string   `json:"catalog_name"`
+		SchemaName  string   `json:"schema_name"`
+		Ids         []string `json:"ids,omitempty" tf:"computed,slice_set"`
+	}
+	return common.DataResource(viewsData{}, func(ctx context.Context, e interface{}, c *common.DatabricksClient) error {
+		data := e.(*viewsData)
+		tablesAPI := NewTablesAPI(ctx, c)
+		tables, err := tablesAPI.listTables(data.CatalogName, data.SchemaName)
+		if err != nil {
+			return err
+		}
+		for _, v := range tables.Tables {
+			if v.TableType == "VIEW" {
+				data.Ids = append(data.Ids, v.FullName())
+			}
+		}
+		return nil
+	})
+}

catalog/data_views_test.go

Lines changed: 53 additions & 0 deletions

@@ -0,0 +1,53 @@
+package catalog
+
+import (
+	"testing"
+
+	"github.com/databrickslabs/terraform-provider-databricks/qa"
+)
+
+func TestViewsData(t *testing.T) {
+	qa.ResourceFixture{
+		Fixtures: []qa.HTTPFixture{
+			{
+				Method:   "GET",
+				Resource: "/api/2.0/unity-catalog/tables/?catalog_name=a&schema_name=b",
+				Response: Tables{
+					Tables: []TableInfo{
+						{
+							CatalogName: "a",
+							SchemaName:  "b",
+							Name:        "c",
+							TableType:   "MANAGED",
+						},
+						{
+							CatalogName: "a",
+							SchemaName:  "b",
+							Name:        "d",
+							TableType:   "VIEW",
+						},
+					},
+				},
+			},
+		},
+		Resource: DataSourceViews(),
+		HCL: `
+		catalog_name = "a"
+		schema_name = "b"`,
+		Read:        true,
+		NonWritable: true,
+		ID:          "_",
+	}.ApplyAndExpectData(t, map[string]interface{}{
+		"ids": []string{"a.b.d"},
+	})
+}
+
+func TestViewsData_Error(t *testing.T) {
+	qa.ResourceFixture{
+		Fixtures:    qa.HTTPFailures,
+		Resource:    DataSourceViews(),
+		Read:        true,
+		NonWritable: true,
+		ID:          "_",
+	}.ExpectError(t, "I'm a teapot")
+}

docs/data-sources/tables.md

Lines changed: 12 additions & 5 deletions

@@ -5,20 +5,27 @@ subcategory: "Unity Catalog"
 
 -> **Note** If you have a fully automated setup with workspaces created by [databricks_mws_workspaces](../resources/mws_workspaces.md) or [azurerm_databricks_workspace](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/databricks_workspace), please make sure to add [depends_on attribute](../index.md#data-resources-and-authentication-is-not-configured-errors) in order to prevent _authentication is not configured for provider_ errors.
 
-Retrieves a list of [databricks_table](../resources/table.md) ids, that were created by Terraform or manually, so that special handling could be applied.
+Retrieves a list of managed or external table full names in Unity Catalog, that were created by Terraform or manually. Use [databricks_views](views.md) for retrieving a list of views.
 
 ## Example Usage
 
-Listing all tables in a _things_ [databricks_schema](../resources/schema.md) from _sandbox_ [databricks_catalog](../resources/catalog.md):
+Granting `SELECT` and `MODIFY` to `sensitive` group on all tables in a _things_ [databricks_schema](../resources/schema.md) from _sandbox_ [databricks_catalog](../resources/catalog.md):
 
 ```hcl
 data "databricks_tables" "things" {
   catalog_name = "sandbox"
-  schema_name = "things"
+  schema_name  = "things"
 }
 
-output "all_things_tables" {
-  value = data.databricks_tables.things
+resource "databricks_grants" "things" {
+  for_each = data.databricks_tables.things.ids
+
+  table = each.value
+
+  grant {
+    principal  = "sensitive"
+    privileges = ["SELECT", "MODIFY"]
+  }
 }
 ```

docs/data-sources/views.md

Lines changed: 49 additions & 0 deletions

@@ -0,0 +1,49 @@
+---
+subcategory: "Unity Catalog"
+---
+# databricks_views Data Source
+
+-> **Note** If you have a fully automated setup with workspaces created by [databricks_mws_workspaces](../resources/mws_workspaces.md) or [azurerm_databricks_workspace](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/databricks_workspace), please make sure to add [depends_on attribute](../index.md#data-resources-and-authentication-is-not-configured-errors) in order to prevent _authentication is not configured for provider_ errors.
+
+Retrieves a list of view full names in Unity Catalog, that were created by Terraform or manually. Use [databricks_tables](tables.md) for retrieving a list of tables.
+
+## Example Usage
+
+Granting `SELECT` and `MODIFY` to `sensitive` group on all views in a _things_ [databricks_schema](../resources/schema.md) from _sandbox_ [databricks_catalog](../resources/catalog.md).
+
+```hcl
+data "databricks_views" "things" {
+  catalog_name = "sandbox"
+  schema_name  = "things"
+}
+
+resource "databricks_grants" "things" {
+  for_each = data.databricks_views.things.ids
+
+  view = each.value
+
+  grant {
+    principal  = "sensitive"
+    privileges = ["SELECT", "MODIFY"]
+  }
+}
+```
+
+## Argument Reference
+
+* `catalog_name` - (Required) Name of [databricks_catalog](../resources/catalog.md)
+* `schema_name` - (Required) Name of [databricks_schema](../resources/schema.md)
+
+## Attribute Reference
+
+This data source exports the following attributes:
+
+* `ids` - set of [databricks_table](../resources/table.md) full names: *`catalog`.`schema`.`view`*
+
+## Related Resources
+
+The following resources are used in the same context:
+
+* [databricks_table](../resources/table.md) to manage tables within Unity Catalog.
+* [databricks_schema](../resources/schema.md) to manage schemas within Unity Catalog.
+* [databricks_catalog](../resources/catalog.md) to manage catalogs within Unity Catalog.

docs/resources/grants.md

Lines changed: 40 additions & 0 deletions

@@ -90,6 +90,26 @@ resource "databricks_grants" "customers" {
 }
 ```
 
+You can also apply grants dynamically with [databricks_tables](../data-sources/tables.md) data resource:
+
+```hcl
+data "databricks_tables" "things" {
+  catalog_name = "sandbox"
+  schema_name  = "things"
+}
+
+resource "databricks_grants" "things" {
+  for_each = data.databricks_tables.things.ids
+
+  table = each.value
+
+  grant {
+    principal  = "sensitive"
+    privileges = ["SELECT", "MODIFY"]
+  }
+}
+```
+
 ## View grants
 
 You can grant `SELECT` privileges to [*`catalog`*.*`database`*.*`view`*](table.md) specified in `view` attribute. You can define a view through [databricks_table](table.md) resource.
@@ -104,6 +124,26 @@ resource "databricks_grants" "customer360" {
 }
 ```
 
+You can also apply grants dynamically with [databricks_views](../data-sources/views.md) data resource:
+
+```hcl
+data "databricks_views" "customers" {
+  catalog_name = "main"
+  schema_name  = "customers"
+}
+
+resource "databricks_grants" "customers" {
+  for_each = data.databricks_views.customers.ids
+
+  view = each.value
+
+  grant {
+    principal  = "sensitive"
+    privileges = ["SELECT", "MODIFY"]
+  }
+}
+```
+
 ## Storage credential grants
 
 You can grant `CREATE_TABLE`, `READ_FILES`, and `WRITE_FILES` privileges to [databricks_storage_credential](storage_credential.md) id specified in `storage_credential` attribute:

docs/resources/sql_permissions.md

Lines changed: 1 addition & 3 deletions

@@ -3,9 +3,7 @@ subcategory: "Security"
 ---
 # databricks_sql_permissions Resource
 
--> **Note** Please switch to [databricks_grants](grants.md) with Unity Catalog to manage data access, which provides better and faster way for managing data security. `databricks_grants` resource *doesn't require a technical cluster to perform operations*. `databricks_sql_permissions` will be removed, once Unity Catalog is Generally Available.
-
--> **Note** On workspaces with Unity Catalog enabled, you may run into errors such as `Error: cannot create sql permissions: cannot read current grants: For unity catalog, please specify the catalog name explicitly. E.g. SHOW GRANT ``[email protected]`` ON CATALOG main`. This happens if your `default_catalog_name` was set to a UC catalog instead of `hive_metastore`. The workaround is to re-assign the metastore again with the default catalog set to be `hive_metastore`. See [databricks_metastore_assignment](metastore_assignment.md).
+-> **Note** Please switch to [databricks_grants](grants.md) with Unity Catalog to manage data access, which provides better and faster way for managing data security. `databricks_grants` resource *doesn't require a technical cluster to perform operations*. On workspaces with Unity Catalog enabled, you may run into errors such as `Error: cannot create sql permissions: cannot read current grants: For unity catalog, please specify the catalog name explicitly. E.g. SHOW GRANT ``[email protected]`` ON CATALOG main`. This happens if your `default_catalog_name` was set to a UC catalog instead of `hive_metastore`. The workaround is to re-assign the metastore again with the default catalog set to be `hive_metastore`. See [databricks_metastore_assignment](metastore_assignment.md).
 
 This resource manages data object access control lists in Databricks workspaces for things like tables, views, databases, and [more](https://docs.databricks.com/security/access-control/table-acls/object-privileges.html). In order to enable Table Access control, you have to login to the workspace as administrator, go to `Admin Console`, pick `Access Control` tab, click on `Enable` button in `Table Access Control` section, and click `Confirm`. The security guarantees of table access control **will only be effective if cluster access control is also turned on**. Please make sure that no users can create clusters in your workspace and all [databricks_cluster](cluster.md) have approximately the following configuration:
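
For illustration, the metastore re-assignment workaround mentioned in that note might look roughly like the following sketch, assuming a `databricks_metastore_assignment` resource with a `default_catalog_name` argument; the workspace ID and metastore reference are placeholders:

```hcl
resource "databricks_metastore_assignment" "this" {
  workspace_id         = 1234567890 # placeholder workspace ID
  metastore_id         = databricks_metastore.this.id
  default_catalog_name = "hive_metastore" # keep the default catalog on the Hive metastore
}
```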

provider/provider.go

Lines changed: 1 addition & 0 deletions

@@ -52,6 +52,7 @@ func DatabricksProvider() *schema.Provider {
 			"databricks_schemas":       catalog.DataSourceSchemas(),
 			"databricks_spark_version": clusters.DataSourceSparkVersion(),
 			"databricks_tables":        catalog.DataSourceTables(),
+			"databricks_views":         catalog.DataSourceViews(),
 			"databricks_user":          scim.DataSourceUser(),
 			"databricks_zones":         clusters.DataSourceClusterZones(),
 		},

qa/testing.go

Lines changed: 6 additions & 0 deletions

@@ -265,6 +265,12 @@ func (f ResourceFixture) ApplyAndExpectData(t *testing.T, data map[string]interf
 	for k, expected := range data {
 		if k == "id" {
 			assert.Equal(t, expected, d.Id())
+		} else if that, ok := d.Get(k).(*schema.Set); ok {
+			this := expected.([]string)
+			assert.Equal(t, len(this), that.Len(), "set has different length")
+			for _, item := range this {
+				assert.True(t, that.Contains(item), "set does not contain %s", item)
+			}
 		} else {
 			assert.Equal(t, expected, d.Get(k))
 		}
