Skip to content

Commit 613ed1a

Browse files
authored
[Internal] Migrate Share Data Source to Plugin Framework (#4161)
## Changes <!-- Summary of your changes that are easy to understand --> This PR migrates the share/shares data sources to the Plugin framework. The code was largely copied "as is" from the previous implementation of the share data source, with the necessary adaptations made for integration with the Plugin framework. ## Tests <!-- How is this tested? Please see the checklist below and also describe any other relevant tests --> ~~Note: current tests create shares using the SDKv2 resource, but fetch them using the new plugin framework data source. Once the resource migration is merged, I will amend this.~~ Edit: Now that the resource itself is merged, the acceptance tests use the plugin framework's version of the resource. - [x] `make test` run locally - [ ] relevant change in `docs/` folder - [x] covered with integration tests in `internal/acceptance` - [x] relevant acceptance tests are passing - [x] using Go SDK --------- Co-authored-by: Omer Lachish <[email protected]>
1 parent dfa6bc0 commit 613ed1a

File tree

4 files changed

+246
-0
lines changed

4 files changed

+246
-0
lines changed

internal/providers/pluginfw/pluginfw.go

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -58,6 +58,8 @@ func (p *DatabricksProviderPluginFramework) DataSources(ctx context.Context) []f
5858
volume.DataSourceVolumes,
5959
registered_model.DataSourceRegisteredModel,
6060
notificationdestinations.DataSourceNotificationDestinations,
61+
sharing.DataSourceShare,
62+
sharing.DataSourceShares,
6163
}
6264
}
6365

Lines changed: 79 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,79 @@
1+
package sharing
2+
3+
import (
4+
"context"
5+
6+
"github.com/databricks/databricks-sdk-go/apierr"
7+
"github.com/databricks/databricks-sdk-go/service/sharing"
8+
"github.com/databricks/terraform-provider-databricks/common"
9+
pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common"
10+
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters"
11+
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema"
12+
"github.com/databricks/terraform-provider-databricks/internal/service/sharing_tf"
13+
"github.com/hashicorp/terraform-plugin-framework/datasource"
14+
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
15+
)
16+
17+
func DataSourceShare() datasource.DataSource {
18+
return &ShareDataSource{}
19+
}
20+
21+
// Compile-time assertion that ShareDataSource implements DataSourceWithConfigure.
var _ datasource.DataSourceWithConfigure = &ShareDataSource{}

// ShareDataSource implements the "share" data source: it looks up one Unity
// Catalog share by name and exposes its full ShareInfo in state.
type ShareDataSource struct {
	// Client is the provider-configured Databricks client; set in Configure.
	Client *common.DatabricksClient
}
26+
27+
// Metadata sets the data source type name to the staging name for "share"
// (the plugin-framework variant coexisting with the SDKv2 data source).
func (d *ShareDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
	resp.TypeName = pluginfwcommon.GetDatabricksStagingName("share")
}
30+
31+
func (d *ShareDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
32+
attrs, blocks := tfschema.DataSourceStructToSchemaMap(sharing_tf.ShareInfo{}, nil)
33+
resp.Schema = schema.Schema{
34+
Attributes: attrs,
35+
Blocks: blocks,
36+
}
37+
}
38+
39+
func (d *ShareDataSource) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
40+
if d.Client == nil {
41+
d.Client = pluginfwcommon.ConfigureDataSource(req, resp)
42+
}
43+
}
44+
45+
func (d *ShareDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
46+
w, diags := d.Client.GetWorkspaceClient()
47+
resp.Diagnostics.Append(diags...)
48+
if resp.Diagnostics.HasError() {
49+
return
50+
}
51+
52+
var config sharing_tf.ShareInfo
53+
diags = req.Config.Get(ctx, &config)
54+
resp.Diagnostics.Append(diags...)
55+
if resp.Diagnostics.HasError() {
56+
return
57+
}
58+
59+
share, err := w.Shares.Get(ctx, sharing.GetShareRequest{
60+
Name: config.Name.ValueString(),
61+
IncludeSharedData: true,
62+
})
63+
if err != nil {
64+
if apierr.IsMissing(err) {
65+
resp.State.RemoveResource(ctx)
66+
}
67+
68+
resp.Diagnostics.AddError("Failed to fetch share", err.Error())
69+
return
70+
}
71+
72+
var shareInfoTfSdk sharing_tf.ShareInfo
73+
resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, share, &shareInfoTfSdk)...)
74+
if resp.Diagnostics.HasError() {
75+
return
76+
}
77+
78+
resp.Diagnostics.Append(resp.State.Set(ctx, shareInfoTfSdk)...)
79+
}
Lines changed: 67 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,67 @@
1+
package sharing
2+
3+
import (
4+
"context"
5+
6+
"github.com/hashicorp/terraform-plugin-framework/types"
7+
8+
"github.com/databricks/databricks-sdk-go/service/sharing"
9+
"github.com/databricks/terraform-provider-databricks/common"
10+
pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common"
11+
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema"
12+
"github.com/hashicorp/terraform-plugin-framework/datasource"
13+
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
14+
)
15+
16+
// SharesList is the state model for the "shares" data source: the set of
// share names visible in the metastore.
type SharesList struct {
	Shares []types.String `tfsdk:"shares" tf:"computed,optional,slice_set"`
}
19+
20+
func DataSourceShares() datasource.DataSource {
21+
return &SharesDataSource{}
22+
}
23+
24+
// Compile-time assertion that SharesDataSource implements DataSourceWithConfigure.
var _ datasource.DataSourceWithConfigure = &SharesDataSource{}

// SharesDataSource implements the "shares" data source: it lists the names
// of every share in the metastore.
type SharesDataSource struct {
	// Client is the provider-configured Databricks client; set in Configure.
	Client *common.DatabricksClient
}
29+
30+
// Metadata sets the data source type name to the staging name for "shares"
// (the plugin-framework variant coexisting with the SDKv2 data source).
func (d *SharesDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
	resp.TypeName = pluginfwcommon.GetDatabricksStagingName("shares")
}
33+
34+
func (d *SharesDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
35+
attrs, blocks := tfschema.DataSourceStructToSchemaMap(SharesList{}, nil)
36+
resp.Schema = schema.Schema{
37+
Attributes: attrs,
38+
Blocks: blocks,
39+
}
40+
}
41+
42+
func (d *SharesDataSource) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
43+
if d.Client == nil {
44+
d.Client = pluginfwcommon.ConfigureDataSource(req, resp)
45+
}
46+
}
47+
48+
func (d *SharesDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
49+
w, diags := d.Client.GetWorkspaceClient()
50+
resp.Diagnostics.Append(diags...)
51+
if resp.Diagnostics.HasError() {
52+
return
53+
}
54+
55+
shares, err := w.Shares.ListAll(ctx, sharing.ListSharesRequest{})
56+
if err != nil {
57+
resp.Diagnostics.AddError("Failed to fetch shares", err.Error())
58+
return
59+
}
60+
61+
shareNames := make([]types.String, len(shares))
62+
for i, share := range shares {
63+
shareNames[i] = types.StringValue(share.Name)
64+
}
65+
66+
resp.Diagnostics.Append(resp.State.Set(ctx, SharesList{Shares: shareNames})...)
67+
}
Lines changed: 98 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,98 @@
1+
package sharing_test
2+
3+
import (
4+
"strconv"
5+
"testing"
6+
7+
"github.com/databricks/terraform-provider-databricks/internal/acceptance"
8+
"github.com/hashicorp/terraform-plugin-testing/terraform"
9+
"github.com/stretchr/testify/assert"
10+
"github.com/stretchr/testify/require"
11+
)
12+
13+
func checkSharesDataSourcePopulated(t *testing.T) func(s *terraform.State) error {
14+
return func(s *terraform.State) error {
15+
_, ok := s.Modules[0].Resources["data.databricks_shares_pluginframework.this"]
16+
require.True(t, ok, "data.databricks_shares_pluginframework.this has to be there")
17+
num_shares, _ := strconv.Atoi(s.Modules[0].Outputs["shares"].Value.(string))
18+
assert.GreaterOrEqual(t, num_shares, 1)
19+
return nil
20+
}
21+
}
22+
// TestUcAccDataSourceShares is a Unity Catalog acceptance test: it provisions
// a catalog, a schema, two managed Delta tables, and a plugin-framework share
// containing both tables, then reads the plugin-framework "shares" data
// source and asserts (via checkSharesDataSourcePopulated) that at least one
// share is listed. {var.RANDOM} is expanded by the acceptance test harness.
func TestUcAccDataSourceShares(t *testing.T) {
	acceptance.UnityWorkspaceLevel(t, acceptance.Step{
		Template: `
		resource "databricks_catalog" "sandbox" {
			name         = "sandbox{var.RANDOM}"
			comment      = "this catalog is managed by terraform"
			properties = {
				purpose = "testing"
			}
		}

		resource "databricks_schema" "things" {
			catalog_name = databricks_catalog.sandbox.id
			name         = "things{var.RANDOM}"
			comment      = "this database is managed by terraform"
			properties = {
				kind = "various"
			}
		}

		resource "databricks_table" "mytable" {
			catalog_name = databricks_catalog.sandbox.id
			schema_name = databricks_schema.things.name
			name = "bar"
			table_type = "MANAGED"
			data_source_format = "DELTA"

			column {
				name      = "id"
				position  = 0
				type_name = "INT"
				type_text = "int"
				type_json = "{\"name\":\"id\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}}"
			}
		}

		resource "databricks_table" "mytable_2" {
			catalog_name = databricks_catalog.sandbox.id
			schema_name = databricks_schema.things.name
			name = "bar_2"
			table_type = "MANAGED"
			data_source_format = "DELTA"

			column {
				name      = "id"
				position  = 0
				type_name = "INT"
				type_text = "int"
				type_json = "{\"name\":\"id\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}}"
			}
		}

		resource "databricks_share_pluginframework" "myshare" {
			name = "{var.RANDOM}-terraform-delta-share"
			object {
				name = databricks_table.mytable.id
				comment = "c"
				data_object_type = "TABLE"
			}
			object {
				name = databricks_table.mytable_2.id
				cdf_enabled = false
				comment = "c"
				data_object_type = "TABLE"
			}
		}

		data "databricks_shares_pluginframework" "this" {
			depends_on = [databricks_share_pluginframework.myshare]
		}
		output "shares" {
			value = length(data.databricks_shares_pluginframework.this.shares)
		}
		`,
		Check: checkSharesDataSourcePopulated(t),
	})
}

0 commit comments

Comments
 (0)