Commit 12210ff

Added databricks_recipient resource for Delta Sharing (#1571)

1 parent 479745e commit 12210ff
5 files changed, +319 -0 lines changed
Lines changed: 50 additions & 0 deletions
```go
package acceptance

import (
	"testing"

	"github.com/databricks/terraform-provider-databricks/internal/acceptance"
	"github.com/databricks/terraform-provider-databricks/qa"
)

func TestAccCreateRecipientDb2Open(t *testing.T) {
	qa.RequireCloudEnv(t, "aws-uc-prod")
	acceptance.Test(t, []acceptance.Step{
		{
			Template: `
			resource "databricks_recipient" "db2open" {
				name = "{var.RANDOM}-terraform-db2open-recipient"
				comment = "made by terraform"
				authentication_type = "TOKEN"
				sharing_code = "{var.RANDOM}"
				ip_access_list {
					allowed_ip_addresses = ["10.0.0.0/16"] // using private ip for acc testing
				}
			}`,
		},
	})
}

func TestAccCreateRecipientDb2DbAws(t *testing.T) {
	qa.RequireCloudEnv(t, "aws-uc-prod")
	acceptance.Test(t, []acceptance.Step{
		{
			Template: `
			resource "databricks_metastore" "recipient_metastore" {
				name = "{var.RANDOM}-terraform-recipient-metastore"
				storage_root = format("s3a://%s/%s", "{var.RANDOM}", "{var.RANDOM}")
				delta_sharing_scope = "INTERNAL"
				delta_sharing_recipient_token_lifetime_in_seconds = "60000"
				force_destroy = true
				lifecycle { ignore_changes = [storage_root] } // fake storage root is causing issues
			}

			resource "databricks_recipient" "db2db" {
				name = "{var.RANDOM}-terraform-db2db-recipient"
				comment = "made by terraform"
				authentication_type = "DATABRICKS"
				data_recipient_global_metastore_id = databricks_metastore.recipient_metastore.global_metastore_id
			}`,
		},
	})
}
```

catalog/resource_recipient.go

Lines changed: 98 additions & 0 deletions
```go
package catalog

import (
	"context"

	"github.com/databricks/terraform-provider-databricks/common"
	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation"
)

type RecipientsAPI struct {
	client  *common.DatabricksClient
	context context.Context
}

func NewRecipientsAPI(ctx context.Context, m interface{}) RecipientsAPI {
	return RecipientsAPI{m.(*common.DatabricksClient), ctx}
}

type Token struct {
	Id             string `json:"id,omitempty" tf:"computed"`
	CreatedAt      int64  `json:"created_at,omitempty" tf:"computed"`
	CreatedBy      string `json:"created_by,omitempty" tf:"computed"`
	ActivationUrl  string `json:"activation_url,omitempty" tf:"computed"`
	ExpirationTime int64  `json:"expiration_time,omitempty" tf:"computed"`
	UpdatedAt      int64  `json:"updated_at,omitempty" tf:"computed"`
	UpdatedBy      string `json:"updated_by,omitempty" tf:"computed"`
}

type IpAccessList struct {
	AllowedIpAddresses []string `json:"allowed_ip_addresses"`
}

type RecipientInfo struct {
	Name                           string        `json:"name" tf:"force_new"`
	Comment                        string        `json:"comment,omitempty"`
	SharingCode                    string        `json:"sharing_code,omitempty" tf:"sensitive,force_new,suppress_diff"`
	AuthenticationType             string        `json:"authentication_type" tf:"force_new"`
	Tokens                         []Token       `json:"tokens,omitempty" tf:"computed"`
	DataRecipientGlobalMetastoreId string        `json:"data_recipient_global_metastore_id,omitempty" tf:"force_new,conflicts:ip_access_list"`
	IpAccessList                   *IpAccessList `json:"ip_access_list,omitempty"`
}

type Recipients struct {
	Recipients []RecipientInfo `json:"recipients"`
}

func (a RecipientsAPI) createRecipient(ci *RecipientInfo) error {
	return a.client.Post(a.context, "/unity-catalog/recipients", ci, ci)
}

func (a RecipientsAPI) getRecipient(name string) (ci RecipientInfo, err error) {
	err = a.client.Get(a.context, "/unity-catalog/recipients/"+name, nil, &ci)
	return
}

func (a RecipientsAPI) deleteRecipient(name string) error {
	return a.client.Delete(a.context, "/unity-catalog/recipients/"+name, nil)
}

func (a RecipientsAPI) updateRecipient(ci *RecipientInfo) error {
	patch := map[string]interface{}{"comment": ci.Comment, "ip_access_list": ci.IpAccessList}
	return a.client.Patch(a.context, "/unity-catalog/recipients/"+ci.Name, patch)
}

func ResourceRecipient() *schema.Resource {
	recipientSchema := common.StructToSchema(RecipientInfo{}, func(m map[string]*schema.Schema) map[string]*schema.Schema {
		m["authentication_type"].ValidateFunc = validation.StringInSlice([]string{"TOKEN", "DATABRICKS"}, false)
		return m
	})
	return common.Resource{
		Schema: recipientSchema,
		Create: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
			var ri RecipientInfo
			common.DataToStructPointer(d, recipientSchema, &ri)
			if err := NewRecipientsAPI(ctx, c).createRecipient(&ri); err != nil {
				return err
			}
			d.SetId(ri.Name)
			return nil
		},
		Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
			ri, err := NewRecipientsAPI(ctx, c).getRecipient(d.Id())
			if err != nil {
				return err
			}
			return common.StructToData(ri, recipientSchema, d)
		},
		Update: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
			var ri RecipientInfo
			common.DataToStructPointer(d, recipientSchema, &ri)
			return NewRecipientsAPI(ctx, c).updateRecipient(&ri)
		},
		Delete: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
			return NewRecipientsAPI(ctx, c).deleteRecipient(d.Id())
		},
	}.ToResource()
}
```

catalog/resource_recipient_test.go

Lines changed: 80 additions & 0 deletions
```go
package catalog

import (
	"testing"

	"github.com/databricks/terraform-provider-databricks/qa"
)

func TestRecipientCornerCases(t *testing.T) {
	qa.ResourceCornerCases(t, ResourceRecipient())
}

func TestCreateRecipient(t *testing.T) {
	qa.ResourceFixture{
		Fixtures: []qa.HTTPFixture{
			{
				Method:   "POST",
				Resource: "/api/2.0/unity-catalog/recipients",
				ExpectedRequest: RecipientInfo{
					Name:               "a",
					Comment:            "b",
					SharingCode:        "c",
					AuthenticationType: "TOKEN",
					Tokens:             nil,
					IpAccessList: &IpAccessList{
						AllowedIpAddresses: []string{"0.0.0.0/0"},
					},
				},
				Response: RecipientInfo{
					Name: "a",
				},
			},
			{
				Method:   "GET",
				Resource: "/api/2.0/unity-catalog/recipients/a",
				Response: RecipientInfo{
					Name:               "a",
					Comment:            "b",
					SharingCode:        "c",
					AuthenticationType: "TOKEN",
					Tokens:             nil,
					IpAccessList: &IpAccessList{
						AllowedIpAddresses: []string{"0.0.0.0/0"},
					},
				},
			},
		},
		Resource: ResourceRecipient(),
		Create:   true,
		HCL: `
		name = "a"
		comment = "b"
		authentication_type = "TOKEN"
		sharing_code = "c"
		ip_access_list {
			allowed_ip_addresses = ["0.0.0.0/0"]
		}
		`,
	}.ApplyNoError(t)
}

func TestCreateRecipient_InvalidAuthType(t *testing.T) {
	qa.ResourceFixture{
		Fixtures: []qa.HTTPFixture{},
		Resource: ResourceRecipient(),
		Create:   true,
		HCL: `
		name = "a"
		comment = "b"
		authentication_type = "temp"
		sharing_code = "c"
		ip_access_list {
			allowed_ip_addresses = ["0.0.0.0/0"]
		}
		`,
	}.ExpectError(t, "invalid config supplied. "+
		"[authentication_type] expected authentication_type "+
		"to be one of [TOKEN DATABRICKS], got temp")
}
```

docs/resources/recipient.md

Lines changed: 90 additions & 0 deletions
---
subcategory: "Unity Catalog"
---
# databricks_recipient Resource

Within a metastore, Unity Catalog provides the ability to create a recipient to attach Delta shares to.

A `databricks_recipient` is contained within a [databricks_metastore](metastore.md) and can contain a list of shares.

## Example Usage

### Databricks Sharing with a non-Databricks recipient

Setting `authentication_type` to `TOKEN` creates a temporary URL to download a credentials file, which is used to authenticate to the sharing server and access data. Use this when the recipient is not on Databricks.

```hcl
resource "random_password" "db2opensharecode" {
  length  = 16
  special = true
}

data "databricks_current_user" "current" {}

resource "databricks_recipient" "db2open" {
  name                = "${data.databricks_current_user.current.alphanumeric}-recipient"
  comment             = "made by terraform"
  authentication_type = "TOKEN"
  sharing_code        = random_password.db2opensharecode.result
  ip_access_list {
    allowed_ip_addresses = [...] // fill in allowed IPv4 addresses (CIDR notation allowed)
  }
}
```

### Databricks-to-Databricks Sharing

Setting `authentication_type` to `DATABRICKS` lets you automatically create a provider for a recipient who is already on Databricks. To do this, the recipient needs to give you the global metastore ID of the metastore you will be sharing with. The global metastore ID follows the format `<cloud>:<region>:<guid>`.

```hcl
data "databricks_current_user" "current" {}

resource "databricks_metastore" "recipient_metastore" {
  name = "recipient"
  storage_root = format("abfss://%s@%s.dfs.core.windows.net/",
    azurerm_storage_account.unity_catalog.name,
    azurerm_storage_container.unity_catalog.name)
  delta_sharing_scope                               = "INTERNAL"
  delta_sharing_recipient_token_lifetime_in_seconds = "60000000"
  force_destroy                                     = true
}

resource "databricks_recipient" "db2db" {
  name                               = "${data.databricks_current_user.current.alphanumeric}-recipient"
  comment                            = "made by terraform"
  authentication_type                = "DATABRICKS"
  data_recipient_global_metastore_id = databricks_metastore.recipient_metastore.global_metastore_id
}
```

## Argument Reference

The following arguments are supported:

* `name` - Name of the recipient. Change forces creation of a new resource.
* `comment` - (Optional) Description of the recipient.
* `sharing_code` - (Optional) The one-time sharing code provided by the data recipient.
* `authentication_type` - The Delta Sharing authentication type. Valid values are `TOKEN` and `DATABRICKS`. Change forces creation of a new resource.
* `data_recipient_global_metastore_id` - Required when `authentication_type` is `DATABRICKS`. Change forces creation of a new resource.
* `ip_access_list` - (Optional) The IP access list restricting where a `TOKEN` recipient may connect from. See below.

### Ip Access List Argument

Only one `ip_access_list` block is allowed per recipient. It conflicts with authentication type `DATABRICKS`.

```hcl
ip_access_list {
  allowed_ip_addresses = ["0.0.0.0/0"]
}
```

Exactly one of the following arguments is required for the `ip_access_list` block:

* `allowed_ip_addresses` - Allowed IP addresses in CIDR notation. Limit of 100.

## Attribute Reference

In addition to the arguments above, the following attribute is exported:

* `tokens` - List of recipient tokens.
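
For a `TOKEN` recipient, the activation link for the credentials file can be read from this attribute. A minimal sketch, reusing the `db2open` resource from the first example (`activation_url` is one of the computed token fields; the `[0]` index assumes a single issued token):

```hcl
output "db2open_activation_url" {
  # Activation link of the first token issued to the recipient; share it with
  # the recipient so they can download their credentials file.
  value = databricks_recipient.db2open.tokens[0].activation_url
}
```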

provider/provider.go

Lines changed: 1 addition & 0 deletions
```diff
@@ -106,6 +106,7 @@ func DatabricksProvider() *schema.Provider {
 			"databricks_permission_assignment": access.ResourcePermissionAssignment(),
 			"databricks_permissions":           permissions.ResourcePermissions(),
 			"databricks_pipeline":              pipelines.ResourcePipeline(),
+			"databricks_recipient":             catalog.ResourceRecipient(),
 			"databricks_repo":                  repos.ResourceRepo(),
 			"databricks_schema":                catalog.ResourceSchema(),
 			"databricks_secret":                secrets.ResourceSecret(),
```
