Skip to content

Commit 1e62576

Browse files
authored
[Internal] Migrate databricks_cluster data source to plugin framework (#3988)
## Changes <!-- Summary of your changes that are easy to understand --> - Migrates `databricks_cluster` data source to plugin framework - Check for different int and float types in Go to Tf converter function - Use computed tag to simplify customize schema for volumes Note: The resource will be suffixed with `_pluginframework` and will be made default in another PR so the change is easily revertible. ## Tests <!-- How is this tested? Please see the checklist below and also describe any other relevant tests --> - Added Integration tests - Added Unit tests All are passing - [x] `make test` run locally - [ ] relevant change in `docs/` folder - [x] covered with integration tests in `internal/acceptance` - [x] relevant acceptance tests are passing - [x] using Go SDK
1 parent 891e0af commit 1e62576

File tree

9 files changed

+209
-15
lines changed

9 files changed

+209
-15
lines changed

internal/providers/pluginfw/converters/go_to_tf.go

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -114,7 +114,7 @@ func goSdkToTfSdkSingleField(ctx context.Context, srcField reflect.Value, destFi
114114
} else {
115115
destField.Set(reflect.ValueOf(types.BoolNull()))
116116
}
117-
case reflect.Int64:
117+
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
118118
// convert any kind of integer to int64
119119
intVal := srcField.Convert(reflect.TypeOf(int64(0))).Int()
120120
// check if the value is non-zero or if the field is in the forceSendFields list
@@ -123,7 +123,7 @@ func goSdkToTfSdkSingleField(ctx context.Context, srcField reflect.Value, destFi
123123
} else {
124124
destField.Set(reflect.ValueOf(types.Int64Null()))
125125
}
126-
case reflect.Float64:
126+
case reflect.Float32, reflect.Float64:
127127
// convert any kind of float to float64
128128
float64Val := srcField.Convert(reflect.TypeOf(float64(0))).Float()
129129
// check if the value is non-zero or if the field is in the forceSendFields list

internal/providers/pluginfw/pluginfw.go

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@ import (
1616
"github.com/databricks/terraform-provider-databricks/commands"
1717
"github.com/databricks/terraform-provider-databricks/common"
1818
providercommon "github.com/databricks/terraform-provider-databricks/internal/providers/common"
19+
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/cluster"
1920
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/library"
2021
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/qualitymonitor"
2122
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/volume"
@@ -49,6 +50,7 @@ func (p *DatabricksProviderPluginFramework) Resources(ctx context.Context) []fun
4950

5051
// DataSources returns the constructors for all data sources implemented with
// the Terraform plugin framework.
func (p *DatabricksProviderPluginFramework) DataSources(ctx context.Context) []func() datasource.DataSource {
	return []func() datasource.DataSource{
		cluster.DataSourceCluster,
		volume.DataSourceVolumes,
	}
}
Lines changed: 129 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,129 @@
1+
package cluster
2+
3+
import (
4+
"context"
5+
"fmt"
6+
"strings"
7+
8+
"github.com/databricks/databricks-sdk-go/apierr"
9+
"github.com/databricks/databricks-sdk-go/service/compute"
10+
"github.com/databricks/terraform-provider-databricks/common"
11+
pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common"
12+
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters"
13+
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema"
14+
"github.com/databricks/terraform-provider-databricks/internal/service/compute_tf"
15+
"github.com/hashicorp/terraform-plugin-framework/datasource"
16+
"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
17+
"github.com/hashicorp/terraform-plugin-framework/diag"
18+
"github.com/hashicorp/terraform-plugin-framework/types"
19+
)
20+
21+
// DataSourceCluster returns a new instance of the databricks_cluster data
// source implemented with the Terraform plugin framework.
func DataSourceCluster() datasource.DataSource {
	return &ClusterDataSource{}
}
24+
25+
// Compile-time check that ClusterDataSource satisfies DataSourceWithConfigure.
var _ datasource.DataSourceWithConfigure = &ClusterDataSource{}

// ClusterDataSource is the plugin-framework implementation of the
// databricks_cluster data source.
type ClusterDataSource struct {
	// Client is the configured Databricks client; set once in Configure.
	Client *common.DatabricksClient
}
30+
31+
// ClusterInfo is the schema model for the databricks_cluster data source.
// The user supplies either cluster_id or cluster_name; both are tagged
// optional+computed because whichever one is omitted is filled in from the
// looked-up cluster before the state is written.
type ClusterInfo struct {
	// ClusterId is the ID of the cluster to look up (or the resolved ID).
	ClusterId types.String `tfsdk:"cluster_id" tf:"optional,computed"`
	// Name is the cluster name to look up (or the resolved name).
	Name types.String `tfsdk:"cluster_name" tf:"optional,computed"`
	// ClusterInfo holds the full details of the resolved cluster.
	ClusterInfo *compute_tf.ClusterDetails `tfsdk:"cluster_info" tf:"optional,computed"`
}
36+
37+
// Metadata sets the data source type name. The "_pluginframework" suffix keeps
// it distinct from the existing SDKv2 databricks_cluster data source until the
// plugin-framework version becomes the default.
func (d *ClusterDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
	resp.TypeName = "databricks_cluster_pluginframework"
}
40+
41+
// Schema derives the Terraform schema from the ClusterInfo struct tags; no
// per-attribute customization is needed.
func (d *ClusterDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
	resp.Schema = schema.Schema{
		Attributes: tfschema.DataSourceStructToSchemaMap(ClusterInfo{}, nil),
	}
}
46+
47+
// Configure stores the provider's DatabricksClient on the data source. The
// nil check makes the call idempotent if Configure runs more than once.
func (d *ClusterDataSource) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
	if d.Client == nil {
		d.Client = pluginfwcommon.ConfigureDataSource(req, resp)
	}
}
52+
53+
func validateClustersList(ctx context.Context, clusters []compute_tf.ClusterDetails, clusterName string) diag.Diagnostics {
54+
if len(clusters) == 0 {
55+
return diag.Diagnostics{diag.NewErrorDiagnostic(fmt.Sprintf("there is no cluster with name '%s'", clusterName), "")}
56+
}
57+
if len(clusters) > 1 {
58+
clusterIDs := []string{}
59+
for _, cluster := range clusters {
60+
clusterIDs = append(clusterIDs, cluster.ClusterId.ValueString())
61+
}
62+
return diag.Diagnostics{diag.NewErrorDiagnostic(fmt.Sprintf("there is more than one cluster with name '%s'", clusterName), fmt.Sprintf("The IDs of those clusters are: %s. When specifying a cluster name, the name must be unique. Alternatively, specify the cluster by ID using the cluster_id attribute.", strings.Join(clusterIDs, ", ")))}
63+
}
64+
return nil
65+
}
66+
67+
func (d *ClusterDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
68+
w, diags := d.Client.GetWorkspaceClient()
69+
resp.Diagnostics.Append(diags...)
70+
if resp.Diagnostics.HasError() {
71+
return
72+
}
73+
74+
var clusterInfo ClusterInfo
75+
resp.Diagnostics.Append(req.Config.Get(ctx, &clusterInfo)...)
76+
if resp.Diagnostics.HasError() {
77+
return
78+
}
79+
clusterName := clusterInfo.Name.ValueString()
80+
clusterId := clusterInfo.ClusterId.ValueString()
81+
if clusterName != "" {
82+
clustersGoSDk, err := w.Clusters.ListAll(ctx, compute.ListClustersRequest{})
83+
if err != nil {
84+
resp.Diagnostics.AddError("failed to list clusters", err.Error())
85+
return
86+
}
87+
var clustersTfSDK []compute_tf.ClusterDetails
88+
for _, cluster := range clustersGoSDk {
89+
var clusterDetails compute_tf.ClusterDetails
90+
resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, cluster, &clusterDetails)...)
91+
if resp.Diagnostics.HasError() {
92+
return
93+
}
94+
clustersTfSDK = append(clustersTfSDK, clusterDetails)
95+
}
96+
namedClusters := []compute_tf.ClusterDetails{}
97+
for _, cluster := range clustersTfSDK {
98+
if cluster.ClusterName == clusterInfo.Name {
99+
namedClusters = append(namedClusters, cluster)
100+
}
101+
}
102+
resp.Diagnostics.Append(validateClustersList(ctx, namedClusters, clusterName)...)
103+
if resp.Diagnostics.HasError() {
104+
return
105+
}
106+
clusterInfo.ClusterInfo = &namedClusters[0]
107+
} else if clusterId != "" {
108+
cluster, err := w.Clusters.GetByClusterId(ctx, clusterId)
109+
if err != nil {
110+
if apierr.IsMissing(err) {
111+
resp.State.RemoveResource(ctx)
112+
}
113+
resp.Diagnostics.AddError(fmt.Sprintf("failed to get cluster with cluster id: %s", clusterId), err.Error())
114+
return
115+
}
116+
var clusterDetails compute_tf.ClusterDetails
117+
resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, cluster, &clusterDetails)...)
118+
if resp.Diagnostics.HasError() {
119+
return
120+
}
121+
clusterInfo.ClusterInfo = &clusterDetails
122+
} else {
123+
resp.Diagnostics.AddError("you need to specify either `cluster_name` or `cluster_id`", "")
124+
return
125+
}
126+
clusterInfo.ClusterId = clusterInfo.ClusterInfo.ClusterId
127+
clusterInfo.Name = clusterInfo.ClusterInfo.ClusterName
128+
resp.Diagnostics.Append(resp.State.Set(ctx, clusterInfo)...)
129+
}
Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
package cluster_test
2+
3+
import (
4+
"testing"
5+
6+
"github.com/databricks/terraform-provider-databricks/internal/acceptance"
7+
)
8+
9+
// dataClusterTemplateById is an HCL snippet that reads a cluster by ID, taken
// from the TEST_DEFAULT_CLUSTER_ID environment variable.
const dataClusterTemplateById = `
	data "databricks_cluster_pluginframework" "by_id" {
		cluster_id = "{env.TEST_DEFAULT_CLUSTER_ID}"
	}
`
14+
15+
// TestAccDataSourceClusterByID is an acceptance test that looks up the default
// test cluster by its ID.
func TestAccDataSourceClusterByID(t *testing.T) {
	acceptance.WorkspaceLevel(t, acceptance.Step{
		Template: dataClusterTemplateById,
	})
}
20+
21+
// TestAccDataSourceClusterByName is an acceptance test that first resolves the
// default test cluster by ID, then looks the same cluster up again by name.
func TestAccDataSourceClusterByName(t *testing.T) {
	acceptance.WorkspaceLevel(t, acceptance.Step{
		Template: dataClusterTemplateById + `
		data "databricks_cluster_pluginframework" "by_name" {
			cluster_name = data.databricks_cluster_pluginframework.by_id.cluster_name
		}`,
	})
}
Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
package cluster
2+
3+
import (
4+
"context"
5+
"fmt"
6+
"testing"
7+
8+
"github.com/databricks/terraform-provider-databricks/internal/service/compute_tf"
9+
"github.com/hashicorp/terraform-plugin-framework/diag"
10+
"github.com/hashicorp/terraform-plugin-framework/types"
11+
"github.com/stretchr/testify/assert"
12+
)
13+
14+
func TestNoClusterError(t *testing.T) {
15+
clusterName := "test-cluster-name"
16+
clusters := []compute_tf.ClusterDetails{}
17+
actualDiagnostics := validateClustersList(context.Background(), clusters, clusterName)
18+
expectedDiagnostics := diag.Diagnostics{diag.NewErrorDiagnostic(fmt.Sprintf("there is no cluster with name '%s'", clusterName), "")}
19+
assert.True(t, actualDiagnostics.HasError())
20+
assert.Equal(t, expectedDiagnostics, actualDiagnostics)
21+
}
22+
23+
func TestMultipleClustersError(t *testing.T) {
24+
clusterName := "test-cluster-name"
25+
clusters := []compute_tf.ClusterDetails{
26+
{
27+
ClusterName: types.StringValue("test-cluster-name"),
28+
ClusterId: types.StringValue("123"),
29+
},
30+
{
31+
ClusterName: types.StringValue("test-cluster-name"),
32+
ClusterId: types.StringValue("456"),
33+
},
34+
}
35+
actualDiagnostics := validateClustersList(context.Background(), clusters, clusterName)
36+
expectedDiagnostics := diag.Diagnostics{diag.NewErrorDiagnostic(fmt.Sprintf("there is more than one cluster with name '%s'", clusterName), "The IDs of those clusters are: 123, 456. When specifying a cluster name, the name must be unique. Alternatively, specify the cluster by ID using the cluster_id attribute.")}
37+
assert.True(t, actualDiagnostics.HasError())
38+
assert.Equal(t, expectedDiagnostics, actualDiagnostics)
39+
}

internal/providers/pluginfw/resources/qualitymonitor/resource_quality_monitor_test.go renamed to internal/providers/pluginfw/resources/qualitymonitor/resource_quality_monitor_acc_test.go

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@ resource "databricks_sql_table" "myInferenceTable" {
4848
4949
`
5050

51-
func TestUcAccQualityMonitorPluginFramework(t *testing.T) {
51+
func TestUcAccQualityMonitor(t *testing.T) {
5252
if os.Getenv("GOOGLE_CREDENTIALS") != "" {
5353
t.Skipf("databricks_quality_monitor resource is not available on GCP")
5454
}
@@ -115,7 +115,7 @@ func TestUcAccQualityMonitorPluginFramework(t *testing.T) {
115115
})
116116
}
117117

118-
func TestUcAccUpdateQualityMonitorPluginFramework(t *testing.T) {
118+
func TestUcAccUpdateQualityMonitor(t *testing.T) {
119119
if os.Getenv("GOOGLE_CREDENTIALS") != "" {
120120
t.Skipf("databricks_quality_monitor resource is not available on GCP")
121121
}

internal/providers/pluginfw/resources/volume/data_volumes.go

Lines changed: 4 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ type VolumesDataSource struct {
2828
type VolumesList struct {
2929
CatalogName types.String `tfsdk:"catalog_name"`
3030
SchemaName types.String `tfsdk:"schema_name"`
31-
Ids []types.String `tfsdk:"ids" tf:"optional"`
31+
Ids []types.String `tfsdk:"ids" tf:"optional,computed"`
3232
}
3333

3434
func (d *VolumesDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
@@ -37,10 +37,7 @@ func (d *VolumesDataSource) Metadata(ctx context.Context, req datasource.Metadat
3737

3838
func (d *VolumesDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
3939
resp.Schema = schema.Schema{
40-
Attributes: tfschema.DataSourceStructToSchemaMap(VolumesList{}, func(c tfschema.CustomizableSchema) tfschema.CustomizableSchema {
41-
c.SetComputed("ids")
42-
return c
43-
}),
40+
Attributes: tfschema.DataSourceStructToSchemaMap(VolumesList{}, nil),
4441
}
4542
}
4643

@@ -69,13 +66,12 @@ func (d *VolumesDataSource) Read(ctx context.Context, req datasource.ReadRequest
6966
if err != nil {
7067
if apierr.IsMissing(err) {
7168
resp.State.RemoveResource(ctx)
72-
return
7369
}
74-
resp.Diagnostics.AddError(fmt.Sprintf("Failed to get volumes for the catalog:%s and schema%s", listVolumesRequest.CatalogName, listVolumesRequest.SchemaName), err.Error())
70+
resp.Diagnostics.AddError(fmt.Sprintf("failed to get volumes for the catalog:%s and schema%s", listVolumesRequest.CatalogName, listVolumesRequest.SchemaName), err.Error())
7571
return
7672
}
7773
for _, v := range volumes {
7874
volumesList.Ids = append(volumesList.Ids, types.StringValue(v.FullName))
7975
}
80-
resp.State.Set(ctx, volumesList)
76+
resp.Diagnostics.Append(resp.State.Set(ctx, volumesList)...)
8177
}

internal/providers/pluginfw/resources/volume/data_volumes_test.go renamed to internal/providers/pluginfw/resources/volume/data_volumes_acc_test.go

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ import (
1010
"github.com/stretchr/testify/require"
1111
)
1212

13-
func checkDataSourceVolumesPluginFrameworkPopulated(t *testing.T) func(s *terraform.State) error {
13+
func checkDataSourceVolumesPopulated(t *testing.T) func(s *terraform.State) error {
1414
return func(s *terraform.State) error {
1515
_, ok := s.Modules[0].Resources["data.databricks_volumes_pluginframework.this"]
1616
require.True(t, ok, "data.databricks_volumes_pluginframework.this has to be there")
@@ -20,7 +20,7 @@ func checkDataSourceVolumesPluginFrameworkPopulated(t *testing.T) func(s *terraf
2020
}
2121
}
2222

23-
func TestUcAccDataSourceVolumesPluginFramework(t *testing.T) {
23+
func TestUcAccDataSourceVolumes(t *testing.T) {
2424
acceptance.UnityWorkspaceLevel(t, acceptance.Step{
2525
Template: `
2626
resource "databricks_catalog" "sandbox" {
@@ -54,6 +54,6 @@ func TestUcAccDataSourceVolumesPluginFramework(t *testing.T) {
5454
value = length(data.databricks_volumes_pluginframework.this.ids)
5555
}
5656
`,
57-
Check: checkDataSourceVolumesPluginFrameworkPopulated(t),
57+
Check: checkDataSourceVolumesPopulated(t),
5858
})
5959
}

0 commit comments

Comments
 (0)