Skip to content

Commit e755e2a

Browse files
Revert "Added customize diff for params field in google_bigquery_data_transfer_config (#6678)" (#6737) (#4823)
This reverts commit 59a5d687c347a3b6d33636620c9057d3145e506a.

Signed-off-by: Modular Magician <[email protected]>
Signed-off-by: Modular Magician <[email protected]>
1 parent f102cc3 commit e755e2a

File tree

3 files changed

+3
-93
lines changed

3 files changed

+3
-93
lines changed

.changelog/6737.txt

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
```release-note:none
2+
```

google-beta/resource_bigquery_data_transfer_config.go

Lines changed: 1 addition & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,6 @@ import (
2222
"strings"
2323
"time"
2424

25-
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff"
2625
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
2726
)
2827

@@ -39,31 +38,6 @@ func sensitiveParamCustomizeDiff(_ context.Context, diff *schema.ResourceDiff, v
3938
return nil
4039
}
4140

42-
func paramsCustomizeDiff(_ context.Context, diff *schema.ResourceDiff, v interface{}) error {
43-
old, new := diff.GetChange("params")
44-
oldParams := old.(map[string]interface{})
45-
newParams := new.(map[string]interface{})
46-
var err error
47-
48-
if oldParams["data_path_template"] != nil && newParams["data_path_template"] != nil && oldParams["data_path_template"].(string) != newParams["data_path_template"].(string) {
49-
err = diff.ForceNew("params")
50-
if err != nil {
51-
return fmt.Errorf("ForceNew failed for params, old - %v and new - %v", oldParams, newParams)
52-
}
53-
return nil
54-
}
55-
56-
if oldParams["destination_table_name_template"] != nil && newParams["destination_table_name_template"] != nil && oldParams["destination_table_name_template"].(string) != newParams["destination_table_name_template"].(string) {
57-
err = diff.ForceNew("params")
58-
if err != nil {
59-
return fmt.Errorf("ForceNew failed for params, old - %v and new - %v", oldParams, newParams)
60-
}
61-
return nil
62-
}
63-
64-
return nil
65-
}
66-
6741
func resourceBigqueryDataTransferConfig() *schema.Resource {
6842
return &schema.Resource{
6943
Create: resourceBigqueryDataTransferConfigCreate,
@@ -81,7 +55,7 @@ func resourceBigqueryDataTransferConfig() *schema.Resource {
8155
Delete: schema.DefaultTimeout(20 * time.Minute),
8256
},
8357

84-
CustomizeDiff: customdiff.All(sensitiveParamCustomizeDiff, paramsCustomizeDiff),
58+
CustomizeDiff: sensitiveParamCustomizeDiff,
8559

8660
Schema: map[string]*schema.Schema{
8761
"data_source_id": {

google-beta/resource_bigquery_data_transfer_config_test.go

Lines changed: 0 additions & 66 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,6 @@ func TestAccBigqueryDataTransferConfig(t *testing.T) {
1919
"service_account": testAccBigqueryDataTransferConfig_scheduledQuery_with_service_account,
2020
"no_destintation": testAccBigqueryDataTransferConfig_scheduledQuery_no_destination,
2121
"booleanParam": testAccBigqueryDataTransferConfig_copy_booleanParam,
22-
"update_params": testAccBigqueryDataTransferConfig_force_new_update_params,
2322
}
2423

2524
for name, tc := range testCases {
@@ -169,45 +168,6 @@ func testAccBigqueryDataTransferConfig_copy_booleanParam(t *testing.T) {
169168
})
170169
}
171170

172-
func testAccBigqueryDataTransferConfig_force_new_update_params(t *testing.T) {
173-
random_suffix := randString(t, 10)
174-
175-
vcrTest(t, resource.TestCase{
176-
PreCheck: func() { testAccPreCheck(t) },
177-
Providers: testAccProviders,
178-
CheckDestroy: testAccCheckBigqueryDataTransferConfigDestroyProducer(t),
179-
Steps: []resource.TestStep{
180-
{
181-
Config: testAccBigqueryDataTransferConfig_update_params_force_new(random_suffix, "old", "old"),
182-
},
183-
{
184-
ResourceName: "google_bigquery_data_transfer_config.update_config",
185-
ImportState: true,
186-
ImportStateVerify: true,
187-
ImportStateVerifyIgnore: []string{"location"},
188-
},
189-
{
190-
Config: testAccBigqueryDataTransferConfig_update_params_force_new(random_suffix, "new", "old"),
191-
},
192-
{
193-
ResourceName: "google_bigquery_data_transfer_config.update_config",
194-
ImportState: true,
195-
ImportStateVerify: true,
196-
ImportStateVerifyIgnore: []string{"location"},
197-
},
198-
{
199-
Config: testAccBigqueryDataTransferConfig_update_params_force_new(random_suffix, "new", "new"),
200-
},
201-
{
202-
ResourceName: "google_bigquery_data_transfer_config.update_config",
203-
ImportState: true,
204-
ImportStateVerify: true,
205-
ImportStateVerifyIgnore: []string{"location"},
206-
},
207-
},
208-
})
209-
}
210-
211171
func testAccCheckBigqueryDataTransferConfigDestroyProducer(t *testing.T) func(s *terraform.State) error {
212172
return func(s *terraform.State) error {
213173
for name, rs := range s.RootModule().Resources {
@@ -409,29 +369,3 @@ resource "google_bigquery_data_transfer_config" "copy_config" {
409369
}
410370
`, random_suffix, random_suffix, random_suffix)
411371
}
412-
413-
func testAccBigqueryDataTransferConfig_update_params_force_new(random_suffix, path, table string) string {
414-
return fmt.Sprintf(`
415-
resource "google_bigquery_dataset" "dataset" {
416-
dataset_id = "tf_test_%s"
417-
friendly_name = "foo"
418-
description = "bar"
419-
location = "US"
420-
}
421-
422-
resource "google_bigquery_data_transfer_config" "update_config" {
423-
display_name = "tf-test-%s"
424-
data_source_id = "google_cloud_storage"
425-
destination_dataset_id = google_bigquery_dataset.dataset.dataset_id
426-
location = google_bigquery_dataset.dataset.location
427-
428-
params = {
429-
data_path_template = "gs://bq-bucket-%s-%s/*.json"
430-
destination_table_name_template = "the-table-%s-%s"
431-
file_format = "JSON"
432-
max_bad_records = 0
433-
write_disposition = "APPEND"
434-
}
435-
}
436-
`, random_suffix, random_suffix, random_suffix, path, random_suffix, table)
437-
}

0 commit comments

Comments (0)