Skip to content

Commit 736679a

Browse files
authored
[Fix] Drift in databricks_cluster resource when using data_security_mode aliases (#4911)
## Changes Suppress diff for `data_security_mode` aliases in `databricks_cluster` resource Fixes #4909 ## Tests Tested in my environment and wrote unit tests: ```tf data "databricks_spark_version" "latest_lts" { long_term_support = true } resource "databricks_cluster" "dedicated" { cluster_name = "Dedicated cluster" spark_version = data.databricks_spark_version.latest_lts.id node_type_id = "i3.xlarge" kind = "CLASSIC_PREVIEW" data_security_mode = "DATA_SECURITY_MODE_DEDICATED" autotermination_minutes = 20 single_user_name = "[email protected]" autoscale { min_workers = 1 max_workers = 2 } } resource "databricks_cluster" "standard" { cluster_name = "Standard cluster" spark_version = data.databricks_spark_version.latest_lts.id node_type_id = "i3.xlarge" kind = "CLASSIC_PREVIEW" data_security_mode = "DATA_SECURITY_MODE_STANDARD" autotermination_minutes = 20 single_user_name = "[email protected]" autoscale { min_workers = 1 max_workers = 2 } } resource "databricks_cluster" "auto" { cluster_name = "Auto cluster" spark_version = data.databricks_spark_version.latest_lts.id node_type_id = "i3.xlarge" kind = "CLASSIC_PREVIEW" data_security_mode = "DATA_SECURITY_MODE_AUTO" autotermination_minutes = 20 single_user_name = "[email protected]" autoscale { min_workers = 1 max_workers = 2 } } ``` ``` > terraform apply Apply complete! Resources: 3 added, 0 changed, 0 destroyed. > terraform plan databricks_cluster.standard: Refreshing state... [id=0806-170022-8nt4x91u] databricks_cluster.auto: Refreshing state... [id=0806-170022-bu1bijv6] databricks_cluster.dedicated: Refreshing state... [id=0806-170022-ylvekmoi] No changes. Your infrastructure matches the configuration. ``` - [x] `make test` run locally - [x] relevant change in `docs/` folder - [ ] covered with integration tests in `internal/acceptance` - [x] using Go SDK - [x] using TF Plugin Framework - [x] has entry in `NEXT_CHANGELOG.md` file
1 parent bde694a commit 736679a

File tree

3 files changed

+189
-1
lines changed

3 files changed

+189
-1
lines changed

NEXT_CHANGELOG.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,8 @@
88

99
### Bug Fixes
1010

11+
* Suppress drift for `data_security_mode` alias values in the `databricks_cluster` resource ([#4911](https://github.com/databricks/terraform-provider-databricks/pull/4911)).
12+
1113
### Documentation
1214

1315
* Document `email_notifications` block in model serving resources ([#4910](https://github.com/databricks/terraform-provider-databricks/pull/4910))

clusters/resource_cluster.go

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -102,6 +102,18 @@ func SparkConfDiffSuppressFunc(k, old, new string, d *schema.ResourceData) bool
102102
return false
103103
}
104104

105+
// These are aliases which is the reason why we suppress these changes
106+
func DataSecurityModeDiffSuppressFunc(k, old, new string, d *schema.ResourceData) bool {
107+
if (old != "" && new == "") ||
108+
(old == "SINGLE_USER" && new == "DATA_SECURITY_MODE_DEDICATED") ||
109+
(old == "USER_ISOLATION" && new == "DATA_SECURITY_MODE_STANDARD") ||
110+
((old == "SINGLE_USER" || old == "USER_ISOLATION") && new == "DATA_SECURITY_MODE_AUTO") {
111+
log.Printf("[DEBUG] Suppressing diff for k=%#v old=%#v new=%#v", k, old, new)
112+
return true
113+
}
114+
return false
115+
}
116+
105117
// This method is a duplicate of ModifyRequestOnInstancePool() in clusters/clusters_api.go that uses Go SDK.
106118
// Long term, ModifyRequestOnInstancePool() in clusters_api.go will be removed once all the resources using clusters are migrated to Go SDK.
107119
func ModifyRequestOnInstancePool(cluster any) error {
@@ -336,7 +348,7 @@ func (ClusterSpec) CustomizeSchema(s *common.CustomizableSchema) *common.Customi
336348
Optional: true,
337349
ForceNew: true,
338350
})
339-
s.SchemaPath("data_security_mode").SetSuppressDiff()
351+
s.SchemaPath("data_security_mode").SetCustomSuppressDiff(DataSecurityModeDiffSuppressFunc)
340352
s.SchemaPath("docker_image", "url").SetRequired()
341353
s.SchemaPath("docker_image", "basic_auth", "password").SetRequired().SetSensitive()
342354
s.SchemaPath("docker_image", "basic_auth", "username").SetRequired()

clusters/resource_cluster_test.go

Lines changed: 174 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,9 @@ package clusters
22

33
import (
44
"fmt"
5+
"github.com/databricks/databricks-sdk-go/experimental/mocks"
6+
"github.com/databricks/databricks-sdk-go/listing"
7+
"github.com/stretchr/testify/mock"
58
"strings"
69
"testing"
710

@@ -1908,3 +1911,174 @@ func TestResourceClusterUpdate_LocalSsdCount(t *testing.T) {
19081911

19091912
assert.NoError(t, err)
19101913
}
1914+
1915+
func TestResourceClusterAliasDedicatedNoDrift_DataSecurityMode(t *testing.T) {
1916+
d, err := qa.ResourceFixture{
1917+
MockWorkspaceClientFunc: func(a *mocks.MockWorkspaceClient) {
1918+
api := a.GetMockClustersAPI().EXPECT()
1919+
api.List(mock.Anything, compute.ListClustersRequest{
1920+
FilterBy: &compute.ListClustersFilterBy{
1921+
IsPinned: true,
1922+
},
1923+
PageSize: 100,
1924+
}).Return(&listing.SliceIterator[compute.ClusterDetails]{})
1925+
api.GetByClusterId(mock.Anything, "abc").Return(
1926+
&compute.ClusterDetails{
1927+
ClusterId: "abc",
1928+
NumWorkers: 100,
1929+
ClusterName: "Dedicated Cluster",
1930+
DataSecurityMode: compute.DataSecurityModeSingleUser,
1931+
SparkVersion: "7.1-scala12",
1932+
NodeTypeId: "i3.xlarge",
1933+
AutoterminationMinutes: 15,
1934+
State: compute.StateTerminated,
1935+
GcpAttributes: &compute.GcpAttributes{
1936+
LocalSsdCount: 2,
1937+
},
1938+
}, nil,
1939+
)
1940+
},
1941+
ID: "abc",
1942+
Read: true,
1943+
Resource: ResourceCluster(),
1944+
InstanceState: map[string]string{
1945+
"autotermination_minutes": "15",
1946+
"cluster_name": "Dedicated Cluster",
1947+
"data_security_mode": "SINGLE_USER",
1948+
"spark_version": "7.1-scala12",
1949+
"node_type_id": "i3.xlarge",
1950+
"num_workers": "100",
1951+
"gcp_attributes": `"{
1952+
local_ssd_count = 2
1953+
}"`,
1954+
},
1955+
HCL: `
1956+
autotermination_minutes = 15,
1957+
cluster_name = "Dedicated Cluster"
1958+
data_security_mode = "DATA_SECURITY_MODE_DEDICATED"
1959+
spark_version = "7.1-scala12"
1960+
node_type_id = "i3.xlarge"
1961+
num_workers = 100
1962+
gcp_attributes = {
1963+
local_ssd_count = 0
1964+
},
1965+
`,
1966+
}.Apply(t)
1967+
1968+
assert.NoError(t, err)
1969+
assert.False(t, d.HasChanges("data_security_mode"))
1970+
}
1971+
1972+
func TestResourceClusterAliasStandardNoDrift_DataSecurityMode(t *testing.T) {
1973+
d, err := qa.ResourceFixture{
1974+
MockWorkspaceClientFunc: func(a *mocks.MockWorkspaceClient) {
1975+
api := a.GetMockClustersAPI().EXPECT()
1976+
api.List(mock.Anything, compute.ListClustersRequest{
1977+
FilterBy: &compute.ListClustersFilterBy{
1978+
IsPinned: true,
1979+
},
1980+
PageSize: 100,
1981+
}).Return(&listing.SliceIterator[compute.ClusterDetails]{})
1982+
api.GetByClusterId(mock.Anything, "abc").Return(
1983+
&compute.ClusterDetails{
1984+
ClusterId: "abc",
1985+
NumWorkers: 100,
1986+
ClusterName: "Standard Cluster",
1987+
DataSecurityMode: compute.DataSecurityModeUserIsolation,
1988+
SparkVersion: "7.1-scala12",
1989+
NodeTypeId: "i3.xlarge",
1990+
AutoterminationMinutes: 15,
1991+
State: compute.StateTerminated,
1992+
GcpAttributes: &compute.GcpAttributes{
1993+
LocalSsdCount: 2,
1994+
},
1995+
}, nil,
1996+
)
1997+
},
1998+
ID: "abc",
1999+
Read: true,
2000+
Resource: ResourceCluster(),
2001+
InstanceState: map[string]string{
2002+
"autotermination_minutes": "15",
2003+
"cluster_name": "Standard Cluster",
2004+
"data_security_mode": "USER_ISOLATION",
2005+
"spark_version": "7.1-scala12",
2006+
"node_type_id": "i3.xlarge",
2007+
"num_workers": "100",
2008+
"gcp_attributes": `"{
2009+
local_ssd_count = 2
2010+
}"`,
2011+
},
2012+
HCL: `
2013+
autotermination_minutes = 15,
2014+
cluster_name = "Standard Cluster"
2015+
data_security_mode = "DATA_SECURITY_MODE_STANDARD"
2016+
spark_version = "7.1-scala12"
2017+
node_type_id = "i3.xlarge"
2018+
num_workers = 100
2019+
gcp_attributes = {
2020+
local_ssd_count = 0
2021+
},
2022+
`,
2023+
}.Apply(t)
2024+
2025+
assert.NoError(t, err)
2026+
assert.False(t, d.HasChanges("data_security_mode"))
2027+
}
2028+
2029+
func TestResourceClusterAliasAutoNoDrift_DataSecurityMode(t *testing.T) {
2030+
d, err := qa.ResourceFixture{
2031+
MockWorkspaceClientFunc: func(a *mocks.MockWorkspaceClient) {
2032+
api := a.GetMockClustersAPI().EXPECT()
2033+
api.List(mock.Anything, compute.ListClustersRequest{
2034+
FilterBy: &compute.ListClustersFilterBy{
2035+
IsPinned: true,
2036+
},
2037+
PageSize: 100,
2038+
}).Return(&listing.SliceIterator[compute.ClusterDetails]{})
2039+
api.GetByClusterId(mock.Anything, "abc").Return(
2040+
&compute.ClusterDetails{
2041+
ClusterId: "abc",
2042+
NumWorkers: 100,
2043+
ClusterName: "Auto Cluster",
2044+
DataSecurityMode: compute.DataSecurityModeUserIsolation,
2045+
SparkVersion: "7.1-scala12",
2046+
NodeTypeId: "i3.xlarge",
2047+
AutoterminationMinutes: 15,
2048+
State: compute.StateTerminated,
2049+
GcpAttributes: &compute.GcpAttributes{
2050+
LocalSsdCount: 2,
2051+
},
2052+
}, nil,
2053+
)
2054+
},
2055+
ID: "abc",
2056+
Read: true,
2057+
Resource: ResourceCluster(),
2058+
InstanceState: map[string]string{
2059+
"autotermination_minutes": "15",
2060+
"cluster_name": "Auto Cluster",
2061+
"data_security_mode": "USER_ISOLATION",
2062+
"spark_version": "7.1-scala12",
2063+
"node_type_id": "i3.xlarge",
2064+
"num_workers": "100",
2065+
"gcp_attributes": `"{
2066+
local_ssd_count = 2
2067+
}"`,
2068+
},
2069+
HCL: `
2070+
autotermination_minutes = 15,
2071+
cluster_name = "Auto Cluster"
2072+
data_security_mode = "DATA_SECURITY_MODE_AUTO"
2073+
spark_version = "7.1-scala12"
2074+
node_type_id = "i3.xlarge"
2075+
num_workers = 100
2076+
gcp_attributes = {
2077+
local_ssd_count = 0
2078+
},
2079+
`,
2080+
}.Apply(t)
2081+
2082+
assert.NoError(t, err)
2083+
assert.False(t, d.HasChanges("data_security_mode"))
2084+
}

0 commit comments

Comments
 (0)