Skip to content

Commit f0b00d8

Browse files
authored
Added support for tf:suppress_diff on primitive types (#1074)
Fix #984
1 parent aa186f9 commit f0b00d8

File tree

6 files changed

+34
-4
lines changed

6 files changed

+34
-4
lines changed

CHANGELOG.md

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,13 @@
11
# Version changelog
22

3+
## 0.4.8
4+
5+
* Added support for `tf:suppress_diff` on primitive types ([#984](https://github.com/databrickslabs/terraform-provider-databricks/issues/984)).
6+
7+
Updated dependency versions:
8+
9+
* Bump google.golang.org/api from 0.65.0 to 0.66.0
10+
311
## 0.4.7
412
* Added optional `force` argument to `databricks_group` resource to ignore `cannot create group: Group with name X already exists.` errors and implicitly import the specific group into Terraform state, enforcing entitlements defined in the instance of resource ([#1066](https://github.com/databrickslabs/terraform-provider-databricks/pull/1066)).
513
* Added support to configure permissions for all MLflow models ([#1044](https://github.com/databrickslabs/terraform-provider-databricks/issues/1044)).

common/reflect_resource.go

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -122,6 +122,16 @@ func handleSensitive(typeField reflect.StructField, schema *schema.Schema) {
122122
}
123123
}
124124

125+
func handleSuppressDiff(typeField reflect.StructField, v *schema.Schema) {
126+
tfTags := strings.Split(typeField.Tag.Get("tf"), ",")
127+
for _, tag := range tfTags {
128+
if tag == "suppress_diff" {
129+
v.DiffSuppressFunc = diffSuppressor(fmt.Sprintf("%v", v.Type.Zero()))
130+
break
131+
}
132+
}
133+
}
134+
125135
func getAlias(typeField reflect.StructField) string {
126136
tfTags := strings.Split(typeField.Tag.Get("tf"), ",")
127137
for _, tag := range tfTags {
@@ -198,12 +208,18 @@ func typeToSchema(v reflect.Value, t reflect.Type, path []string) map[string]*sc
198208
switch typeField.Type.Kind() {
199209
case reflect.Int, reflect.Int32, reflect.Int64:
200210
scm[fieldName].Type = schema.TypeInt
211+
// diff suppression needs type for zero value
212+
handleSuppressDiff(typeField, scm[fieldName])
201213
case reflect.Float64:
202214
scm[fieldName].Type = schema.TypeFloat
215+
// diff suppression needs type for zero value
216+
handleSuppressDiff(typeField, scm[fieldName])
203217
case reflect.Bool:
204218
scm[fieldName].Type = schema.TypeBool
205219
case reflect.String:
206220
scm[fieldName].Type = schema.TypeString
221+
// diff suppression needs type for zero value
222+
handleSuppressDiff(typeField, scm[fieldName])
207223
case reflect.Map:
208224
scm[fieldName].Type = schema.TypeMap
209225
case reflect.Ptr:

common/reflect_resource_test.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -197,7 +197,7 @@ type Address struct {
197197

198198
type Dummy struct {
199199
Enabled bool `json:"enabled" tf:"conflicts:workers"`
200-
Workers int `json:"workers,omitempty"`
200+
Workers int `json:"workers,omitempty" tf:"suppress_diff"`
201201
Description string `json:"description,omitempty"`
202202
Addresses []Address `json:"addresses,omitempty" tf:"max_items:10"`
203203
Unique []Address `json:"unique,omitempty" tf:"slice_set"`

common/version.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@ package common
33
import "context"
44

55
var (
6-
version = "0.4.7"
6+
version = "0.4.8"
77
// ResourceName is resource name without databricks_ prefix
88
ResourceName contextKey = 1
99
// Provider is the current instance of provider

pools/resource_instance_pool.go

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -42,13 +42,13 @@ type InstancePool struct {
4242
InstancePoolID string `json:"instance_pool_id,omitempty" tf:"computed"`
4343
InstancePoolName string `json:"instance_pool_name"`
4444
MinIdleInstances int32 `json:"min_idle_instances,omitempty"`
45-
MaxCapacity int32 `json:"max_capacity,omitempty"`
45+
MaxCapacity int32 `json:"max_capacity,omitempty" tf:"suppress_diff"`
4646
IdleInstanceAutoTerminationMinutes int32 `json:"idle_instance_autotermination_minutes"`
4747
AwsAttributes *InstancePoolAwsAttributes `json:"aws_attributes,omitempty" tf:"force_new,suppress_diff"`
4848
AzureAttributes *InstancePoolAzureAttributes `json:"azure_attributes,omitempty" tf:"force_new,suppress_diff"`
4949
NodeTypeID string `json:"node_type_id" tf:"force_new"`
5050
CustomTags map[string]string `json:"custom_tags,omitempty" tf:"force_new"`
51-
EnableElasticDisk bool `json:"enable_elastic_disk,omitempty" tf:"force_new"`
51+
EnableElasticDisk bool `json:"enable_elastic_disk,omitempty" tf:"force_new,suppress_diff"`
5252
DiskSpec *InstancePoolDiskSpec `json:"disk_spec,omitempty" tf:"force_new"`
5353
PreloadedSparkVersions []string `json:"preloaded_spark_versions,omitempty" tf:"force_new"`
5454
PreloadedDockerImages []clusters.DockerImage `json:"preloaded_docker_images,omitempty" tf:"force_new,slice_set,alias:preloaded_docker_image"`

scripts/gcp-integration/main.tf

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -43,6 +43,7 @@ resource "databricks_mws_workspaces" "this" {
4343
project_id = data.google_client_config.current.project
4444
}
4545
}
46+
token {}
4647
}
4748

4849
// google_service_account configured via env
@@ -76,6 +77,11 @@ output "databricks_host" {
7677
value = databricks_mws_workspaces.this.workspace_url
7778
}
7879

80+
output "databricks_token" {
81+
value = databricks_mws_workspaces.this.token[0].token_value
82+
sensitive = true
83+
}
84+
7985
output "cloud_env" {
8086
value = "gcp"
8187
}

0 commit comments

Comments (0)